am 6216c4ac: (-s ours) am a9f1a9f9: (-s ours) am 705405df: (-s ours) am a3d541b5: (-s ours) Merge "DO NOT MERGE ANYWHERE: bumping cts to 5.1r3" into lollipop-mr1-cts-dev

* commit '6216c4acbbe36b7c14f20ab757948dfe53243211':
  DO NOT MERGE ANYWHERE: bumping cts to 5.1r3
diff --git a/CtsBuild.mk b/CtsBuild.mk
index 1b12293..c745885 100644
--- a/CtsBuild.mk
+++ b/CtsBuild.mk
@@ -17,7 +17,7 @@
 # Functions to get the paths of the build outputs.
 
 define cts-get-lib-paths
-	$(foreach lib,$(1),$(HOST_OUT_JAVA_LIBRARIES)/$(lib).jar)
+	$(foreach lib,$(1),$(CTS_TESTCASES_OUT)/$(lib).jar)
 endef
 
 define cts-get-ui-lib-paths
@@ -25,7 +25,7 @@
 endef
 
 define cts-get-native-paths
-	$(foreach exe,$(1),$(call intermediates-dir-for,EXECUTABLES,$(exe),,,$(3))/$(exe)$(2))
+	$(foreach exe,$(1),$(CTS_TESTCASES_OUT)/$(exe)$(2))
 endef
 
 define cts-get-package-paths
diff --git a/CtsTestCaseList.mk b/CtsTestCaseList.mk
index 6f475cc..4d30dda 100644
--- a/CtsTestCaseList.mk
+++ b/CtsTestCaseList.mk
@@ -19,6 +19,8 @@
     CtsDocumentClient \
     CtsExternalStorageApp \
     CtsInstrumentationAppDiffCert \
+    CtsUsePermissionApp \
+    CtsUsePermissionAppCompat \
     CtsPermissionDeclareApp \
     CtsPermissionDeclareAppCompat \
     CtsReadExternalStorageApp \
@@ -57,13 +59,33 @@
     CtsKeySetSigningBUpgradeB \
     CtsKeySetSigningAAndBUpgradeA \
     CtsKeySetSigningAAndCUpgradeB \
-    CtsKeySetSigningAUpgradeNone
+    CtsKeySetSigningAUpgradeNone \
+    CtsKeySetSharedUserSigningAUpgradeB \
+    CtsKeySetSharedUserSigningBUpgradeB \
+    CtsKeySetSigningABadUpgradeB \
+    CtsKeySetSigningCBadAUpgradeAB \
+    CtsKeySetSigningANoDefUpgradeB \
+    CtsKeySetSigningAUpgradeEcA \
+    CtsKeySetSigningEcAUpgradeA
+
+cts_account_support_packages := \
+    CtsUnaffiliatedAccountAuthenticators
 
 cts_support_packages := \
     CtsAccelerationTestStubs \
+    CtsAlarmClockService \
     CtsAppTestStubs \
+    CtsAssistService \
+    CtsAssistApp \
+    CtsAtraceTestApp \
+    CtsCertInstallerApp \
     CtsDeviceAdmin \
     CtsDeviceOpenGl \
+    CtsWifiConfigCreator \
+    CtsDeviceAndProfileOwnerApp \
+    CtsDeviceAppUsageTestApp \
+    CtsDeviceInfo \
+    CtsDeviceOsTestApp \
     CtsDeviceOwnerApp \
     CtsDeviceTaskswitchingAppA \
     CtsDeviceTaskswitchingAppB \
@@ -74,15 +96,23 @@
     CtsIntentSenderApp \
     CtsLauncherAppsTests \
     CtsLauncherAppsTestsSupport \
+    CtsLeanbackJank \
     CtsManagedProfileApp \
     CtsMonkeyApp \
     CtsMonkeyApp2 \
+    CtsPermissionApp \
     CtsSimpleApp \
+    CtsSimplePreMApp \
     CtsSomeAccessibilityServices \
     CtsThemeDeviceApp \
     TestDeviceSetup \
     CtsUiAutomatorApp \
     CtsUsbSerialTestApp \
+    CtsVoiceInteractionService \
+    CtsVoiceInteractionApp \
+    CtsVoiceSettingsService \
+    CtsWidgetProviderApp \
+    $(cts_account_support_packages) \
     $(cts_security_apps_list) \
     $(cts_security_keysets_list)
 
@@ -111,11 +141,14 @@
     CtsAccessibilityServiceTestCases \
     CtsAccessibilityTestCases \
     CtsAdminTestCases \
+    CtsAlarmClockTestCases \
     CtsAnimationTestCases \
     CtsAppTestCases \
     CtsAppWidgetTestCases \
+    CtsAssistTestCases \
     CtsBluetoothTestCases \
     CtsCalendarcommon2TestCases \
+    CtsCallLogTestCases \
     CtsContentTestCases \
     CtsDatabaseTestCases \
     CtsDisplayTestCases \
@@ -128,16 +161,21 @@
     CtsGraphicsTestCases \
     CtsGraphics2TestCases \
     CtsHardwareTestCases \
+    CtsJankTestCases \
+    CtsLeanbackJankTestCases \
     CtsJobSchedulerDeviceTestCases \
     CtsJniTestCases \
     CtsKeystoreTestCases \
+    CtsLibcoreLegacy22TestCases \
     CtsLocationTestCases \
     CtsLocation2TestCases \
     CtsMediaStressTestCases \
     CtsMediaTestCases \
+    CtsMidiTestCases \
     CtsNativeOpenGLTestCases \
     CtsNdefTestCases \
     CtsNetTestCases \
+    CtsNetTestCasesLegacyApi22 \
     CtsOpenGLTestCases \
     CtsOpenGlPerfTestCases \
     CtsOsTestCases \
@@ -154,18 +192,22 @@
     CtsSecurityTestCases \
     CtsSignatureTestCases \
     CtsSpeechTestCases \
+    CtsTelecomTestCases \
+    CtsTelecomTestCases2 \
     CtsTelephonyTestCases \
     CtsTextTestCases \
     CtsTextureViewTestCases \
     CtsThemeTestCases \
+    CtsTransitionTestCases \
     CtsTvTestCases \
     CtsUiAutomationTestCases \
     CtsUiRenderingTestCases \
     CtsUsageStatsTestCases \
     CtsUtilTestCases \
     CtsViewTestCases \
+    CtsVoiceInteractionTestCases \
+    CtsVoiceSettingsTestCases \
     CtsWebkitTestCases \
-    CtsWebGLTestCases \
     CtsWidgetTestCases
 
 # All APKs that need to be scanned by the coverage utilities.
@@ -177,6 +219,7 @@
 cts_host_libraries := \
     CtsAdbTests \
     CtsAppSecurityTests \
+    CtsAtraceHostTestCases \
     CtsDevicePolicyManagerTestCases \
     CtsDumpsysHostTestCases \
     CtsHostJank \
@@ -184,7 +227,9 @@
     CtsHostUi \
     CtsJdwpSecurityHostTestCases \
     CtsMonkeyTestCases \
+    CtsOsHostTestCases \
     CtsThemeHostTestCases \
+    CtsUsageHostTestCases \
     CtsSecurityHostTestCases \
     CtsUsbTests
 
@@ -205,16 +250,14 @@
 
 cts_device_jars := \
     CtsDeviceJank \
-    CtsJdwpApp \
-    CtsPrintInstrument
-
-cts_device_executables := \
-    print-instrument
+    CtsJdwpApp
 
 cts_target_junit_tests := \
     CtsJdwp
 
 cts_deqp_test_apis := \
+    egl \
+    gles2 \
     gles3 \
     gles31
 
@@ -225,14 +268,8 @@
     $(call cts-get-ui-lib-paths,$(cts_ui_tests)) \
     $(call cts-get-ui-lib-paths,$(cts_device_jars)) \
     $(call cts-get-ui-lib-paths,$(cts_target_junit_tests)) \
-    $(call cts-get-executable-paths,$(cts_device_executables))
-
-# NOTE: If compiling on a 64 bit target, TARGET_2ND_ARCH will be non-empty
-# and will cause the function to expand to the secondary arch object
-# directory. If compiling on a 32 bit target, TARGET_2ND_ARCH will be
-# empty and will cause the function to expand to the primary arch object
-# directory.
-CTS_TEST_CASES += $(call cts-get-native-paths,$(cts_native_tests),32,$(TARGET_2ND_ARCH))
+    $(call cts-get-executable-paths,$(cts_device_executables)) \
+    $(call cts-get-native-paths,$(cts_native_tests),32)
 
 ifeq ($(TARGET_IS_64_BIT),true)
 CTS_TEST_CASES += $(call cts-get-native-paths,$(cts_native_tests),64)
diff --git a/apps/CameraITS/CameraITS.pdf b/apps/CameraITS/CameraITS.pdf
index 2430420..8953af9 100644
--- a/apps/CameraITS/CameraITS.pdf
+++ b/apps/CameraITS/CameraITS.pdf
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/caps.py b/apps/CameraITS/pymodules/its/caps.py
index 24f4e75..95f19d9 100644
--- a/apps/CameraITS/pymodules/its/caps.py
+++ b/apps/CameraITS/pymodules/its/caps.py
@@ -133,6 +133,17 @@
     """
     return len(its.objects.get_available_output_sizes("raw10", props)) > 0
 
+def raw12(props):
+    """Returns whether a device supports RAW12 output.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return len(its.objects.get_available_output_sizes("raw12", props)) > 0
+
 def sensor_fusion(props):
     """Returns whether the camera and motion sensor timestamps for the device
     are in the same time domain and can be compared directly.
@@ -222,6 +233,109 @@
     return props.has_key("android.control.aeCompensationRange") and \
            props["android.control.aeCompensationRange"] != [0, 0]
 
+def ae_lock(props):
+    """Returns whether a device supports AE lock
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        Boolean.
+    """
+    return props.has_key("android.control.aeLockAvailable") and \
+           props["android.control.aeLockAvailable"] == 1
+
+def awb_lock(props):
+    """Returns whether a device supports AWB lock
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        Boolean.
+    """
+    return props.has_key("android.control.awbLockAvailable") and \
+           props["android.control.awbLockAvailable"] == 1
+
+def lsc_map(props):
+    """Returns whether a device supports lens shading map output
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        Boolean.
+    """
+    return props.has_key(
+            "android.statistics.info.availableLensShadingMapModes") and \
+        1 in props["android.statistics.info.availableLensShadingMapModes"]
+
+def lsc_off(props):
+    """Returns whether a device supports disabling lens shading correction
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        Boolean.
+    """
+    return props.has_key(
+            "android.shading.availableModes") and \
+        0 in props["android.shading.availableModes"]
+
+def yuv_reprocess(props):
+    """Returns whether a device supports YUV reprocessing.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key("android.request.availableCapabilities") and \
+           7 in props["android.request.availableCapabilities"]
+
+def private_reprocess(props):
+    """Returns whether a device supports PRIVATE reprocessing.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key("android.request.availableCapabilities") and \
+           4 in props["android.request.availableCapabilities"]
+
+def noise_reduction_mode(props, mode):
+    """Returns whether a device supports the noise reduction mode.
+
+    Args:
+        props: Camera properties objects.
+        mode: Integer, indicating the noise reduction mode to check for
+              availability.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key(
+            "android.noiseReduction.availableNoiseReductionModes") and \
+           mode in props["android.noiseReduction.availableNoiseReductionModes"]
+
+def edge_mode(props, mode):
+    """Returns whether a device supports the edge mode.
+
+    Args:
+        props: Camera properties objects.
+        mode: Integer, indicating the edge mode to check for availability.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key(
+            "android.edge.availableEdgeModes") and \
+           mode in props["android.edge.availableEdgeModes"]
+
 class __UnitTest(unittest.TestCase):
     """Run a suite of unit tests on this module.
     """
diff --git a/apps/CameraITS/pymodules/its/device.py b/apps/CameraITS/pymodules/its/device.py
index ad9786e..756f959 100644
--- a/apps/CameraITS/pymodules/its/device.py
+++ b/apps/CameraITS/pymodules/its/device.py
@@ -41,12 +41,21 @@
         sock: The open socket.
     """
 
-    # Open a connection to localhost:6000, forwarded to port 6000 on the device.
-    # TODO: Support multiple devices running over different TCP ports.
+    # Open a connection to localhost:<host_port>, forwarded to port 6000 on the
+    # device. <host_port> is determined at run-time to support multiple
+    # connected devices.
     IPADDR = '127.0.0.1'
-    PORT = 6000
+    REMOTE_PORT = 6000
     BUFFER_SIZE = 4096
 
+    # LOCK_PORT is used as a mutex lock to protect the list of forwarded ports
+    # among all processes. The script assumes LOCK_PORT is available and will
+    # try to use ports between CLIENT_PORT_START and
+    # CLIENT_PORT_START+MAX_NUM_PORTS-1 on host for ITS sessions.
+    CLIENT_PORT_START = 6000
+    MAX_NUM_PORTS = 100
+    LOCK_PORT = CLIENT_PORT_START + MAX_NUM_PORTS
+
     # Seconds timeout on each socket operation.
     SOCK_TIMEOUT = 10.0
     SEC_TO_NSEC = 1000*1000*1000.0
@@ -58,8 +67,8 @@
     EXTRA_SUCCESS = 'camera.its.extra.SUCCESS'
     EXTRA_SUMMARY = 'camera.its.extra.SUMMARY'
 
-    # TODO: Handle multiple connected devices.
-    ADB = "adb -d"
+    adb = "adb -d"
+    device_id = ""
 
     # Definitions for some of the common output format options for do_capture().
     # Each gets images of full resolution for each requested format.
@@ -75,12 +84,83 @@
     CAP_RAW_YUV_JPEG = [{"format":"raw"}, {"format":"yuv"}, {"format":"jpeg"}]
     CAP_DNG_YUV_JPEG = [{"format":"dng"}, {"format":"yuv"}, {"format":"jpeg"}]
 
-    # Method to handle the case where the service isn't already running.
-    # This occurs when a test is invoked directly from the command line, rather
-    # than as a part of a separate test harness which is setting up the device
-    # and the TCP forwarding.
-    def __pre_init(self):
+    # Initialize the socket port for the host to forward requests to the device.
+    # This method assumes localhost's LOCK_PORT is available and will try to
+    # use ports between CLIENT_PORT_START and CLIENT_PORT_START+MAX_NUM_PORTS-1
+    def __init_socket_port(self):
+        NUM_RETRIES = 100
+        RETRY_WAIT_TIME_SEC = 0.05
 
+        # Bind a socket to use as mutex lock
+        socket_lock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        for i in range(NUM_RETRIES):
+            try:
+                socket_lock.bind((ItsSession.IPADDR, ItsSession.LOCK_PORT))
+                break
+            except socket.error:
+                if i == NUM_RETRIES - 1:
+                    raise its.error.Error(self.device_id,
+                                          "acquiring socket lock timed out")
+                else:
+                    time.sleep(RETRY_WAIT_TIME_SEC)
+
+        # Check if a port is already assigned to the device.
+        command = "adb forward --list"
+        proc = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
+        output, error = proc.communicate()
+
+        port = None
+        used_ports = []
+        for line in output.split(os.linesep):
+            # each line should be formatted as:
+            # "<device_id> tcp:<host_port> tcp:<remote_port>"
+            forward_info = line.split()
+            if len(forward_info) >= 3 and \
+               len(forward_info[1]) > 4 and forward_info[1][:4] == "tcp:" and \
+               len(forward_info[2]) > 4 and forward_info[2][:4] == "tcp:":
+                local_p = int(forward_info[1][4:])
+                remote_p = int(forward_info[2][4:])
+                if forward_info[0] == self.device_id and \
+                   remote_p == ItsSession.REMOTE_PORT:
+                    port = local_p
+                    break
+                else:
+                    used_ports.append(local_p)
+
+        # Find the first available port if no port is assigned to the device.
+        if port is None:
+            for p in range(ItsSession.CLIENT_PORT_START,
+                           ItsSession.CLIENT_PORT_START +
+                           ItsSession.MAX_NUM_PORTS):
+                if p not in used_ports:
+                    # Try to run "adb forward" with the port
+                    command = "%s forward tcp:%d tcp:%d" % \
+                              (self.adb, p, self.REMOTE_PORT)
+                    proc = subprocess.Popen(command.split(),
+                                            stdout=subprocess.PIPE,
+                                            stderr=subprocess.PIPE)
+                    output, error = proc.communicate()
+
+                    # Check if there is no error
+                    if error is None or error.find("error") < 0:
+                        port = p
+                        break
+
+        if port is None:
+            raise its.error.Error(self.device_id,
+                                  "cannot find an available port")
+
+        # Release the socket as mutex unlock
+        socket_lock.close()
+
+        # Connect to the socket
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.sock.connect((self.IPADDR, port))
+        self.sock.settimeout(self.SOCK_TIMEOUT)
+
+    # Reboot the device if needed and wait for the service to be ready for
+    # connection.
+    def __wait_for_service(self):
         # This also includes the optional reboot handling: if the user
         # provides a "reboot" or "reboot=N" arg, then reboot the device,
         # waiting for N seconds (default 30) before returning.
@@ -90,19 +170,19 @@
                 if len(s) > 7 and s[6] == "=":
                     duration = int(s[7:])
                 print "Rebooting device"
-                _run("%s reboot" % (ItsSession.ADB));
-                _run("%s wait-for-device" % (ItsSession.ADB))
+                _run("%s reboot" % (self.adb));
+                _run("%s wait-for-device" % (self.adb))
                 time.sleep(duration)
                 print "Reboot complete"
 
         # TODO: Figure out why "--user 0" is needed, and fix the problem.
-        _run('%s shell am force-stop --user 0 %s' % (ItsSession.ADB, self.PACKAGE))
+        _run('%s shell am force-stop --user 0 %s' % (self.adb, self.PACKAGE))
         _run(('%s shell am startservice --user 0 -t text/plain '
-              '-a %s') % (ItsSession.ADB, self.INTENT_START))
+              '-a %s') % (self.adb, self.INTENT_START))
 
         # Wait until the socket is ready to accept a connection.
         proc = subprocess.Popen(
-                ItsSession.ADB.split() + ["logcat"],
+                self.adb.split() + ["logcat"],
                 stdout=subprocess.PIPE)
         logcat = proc.stdout
         while True:
@@ -111,15 +191,14 @@
                 break
         proc.kill()
 
-        # Setup the TCP-over-ADB forwarding.
-        _run('%s forward tcp:%d tcp:%d' % (ItsSession.ADB,self.PORT,self.PORT))
-
     def __init__(self):
-        if "noinit" not in sys.argv:
-            self.__pre_init()
-        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        self.sock.connect((self.IPADDR, self.PORT))
-        self.sock.settimeout(self.SOCK_TIMEOUT)
+        # Initialize device id and adb command.
+        self.device_id = get_device_id()
+        self.adb = "adb -s " + self.device_id
+
+        self.__wait_for_service()
+        self.__init_socket_port()
+
         self.__close_camera()
         self.__open_camera()
 
@@ -360,7 +439,7 @@
             raise its.error.Error('3A failed to converge')
         return ae_sens, ae_exp, awb_gains, awb_transform, af_dist
 
-    def do_capture(self, cap_request, out_surfaces=None):
+    def do_capture(self, cap_request, out_surfaces=None, reprocess_format=None):
         """Issue capture request(s), and read back the image(s) and metadata.
 
         The main top-level function for capturing one or more images using the
@@ -369,7 +448,7 @@
 
         The out_surfaces field can specify the width(s), height(s), and
         format(s) of the captured image. The formats may be "yuv", "jpeg",
-        "dng", "raw", or "raw10". The default is a YUV420 frame ("yuv")
+        "dng", "raw", "raw10", or "raw12". The default is a YUV420 frame ("yuv")
         corresponding to a full sensor frame.
 
         Note that one or more surfaces can be specified, allowing a capture to
@@ -379,6 +458,18 @@
         surface. At most one output surface can be specified for a given format,
         and raw+dng, raw10+dng, and raw+raw10 are not supported as combinations.
 
+        If reprocess_format is not None, for each request, an intermediate
+        buffer of the given reprocess_format will be captured from camera and
+        the intermediate buffer will be reprocessed to the output surfaces. The
+        following settings will be turned off when capturing the intermediate
+        buffer and will be applied when reprocessing the intermediate buffer.
+            1. android.noiseReduction.mode
+            2. android.edge.mode
+            3. android.reprocess.effectiveExposureFactor
+
+        Supported reprocess formats are "yuv" and "private". Supported output
+        surface formats when reprocessing is enabled are "yuv" and "jpeg".
+
         Example of a single capture request:
 
             {
@@ -450,6 +541,8 @@
                 will be converted to JSON and sent to the device.
             out_surfaces: (Optional) specifications of the output image formats
                 and sizes to use for each capture.
+            reprocess_format: (Optional) The reprocessing format. If not None,
+                reprocessing will be enabled.
 
         Returns:
             An object, list of objects, or list of lists of objects, where each
@@ -461,7 +554,11 @@
             * metadata: the capture result object (Python dictionary).
         """
         cmd = {}
-        cmd["cmdName"] = "doCapture"
+        if reprocess_format is not None:
+            cmd["cmdName"] = "doReprocessCapture"
+            cmd["reprocessFormat"] = reprocess_format
+        else:
+            cmd["cmdName"] = "doCapture"
         if not isinstance(cap_request, list):
             cmd["captureRequests"] = [cap_request]
         else:
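Illustrative reprocessing call, per the reprocess_format description in the
docstring above (a sketch, not part of the patch; assumes an open ItsSession
"cam", and the sensitivity/exposure values are arbitrary):

    req = its.objects.manual_capture_request(100, 10*1000*1000)
    # Capture a YUV intermediate buffer and reprocess it to a JPEG output.
    cap = cam.do_capture(req, out_surfaces=[{"format": "jpeg"}],
                         reprocess_format="yuv")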
@@ -540,10 +637,52 @@
         self.sock.settimeout(self.SOCK_TIMEOUT)
         return rets if len(rets)>1 else rets[0]
 
-def report_result(camera_id, success, summary_path=None):
+def get_device_id():
+    """ Return the ID of the device that the test is running on.
+
+    Return the device ID provided in the command line if it's connected. If no
+    device ID is provided in the command line and there is only one device
+    connected, return the device ID by parsing the result of "adb devices".
+
+    Raise an exception if no device is connected, if the device ID provided in
+    the command line is not connected, or if no device ID is provided in the
+    command line and more than one device is connected.
+
+    Returns:
+        Device ID string.
+    """
+    device_id = None
+    for s in sys.argv[1:]:
+        if s[:7] == "device=" and len(s) > 7:
+            device_id = str(s[7:])
+
+    # Get a list of connected devices
+    devices = []
+    command = "adb devices"
+    proc = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
+    output, error = proc.communicate()
+    for line in output.split(os.linesep):
+        device_info = line.split()
+        if len(device_info) == 2 and device_info[1] == "device":
+            devices.append(device_info[0])
+
+    if len(devices) == 0:
+        raise its.error.Error("No device is connected!")
+    elif device_id is not None and device_id not in devices:
+        raise its.error.Error(device_id + " is not connected!")
+    elif device_id is None and len(devices) >= 2:
+        raise its.error.Error("More than 1 device are connected. " +
+                "Use device=<device_id> to specify a device to test.")
+    elif len(devices) == 1:
+        device_id = devices[0]
+
+    return device_id
+
+def report_result(device_id, camera_id, success, summary_path=None):
     """Send a pass/fail result to the device, via an intent.
 
     Args:
+        device_id: The ID string of the device to report the results to.
         camera_id: The ID string of the camera for which to report pass/fail.
         success: Boolean, indicating if the result was pass or fail.
         summary_path: (Optional) path to ITS summary file on host PC
@@ -551,18 +690,19 @@
     Returns:
         Nothing.
     """
+    adb = "adb -s " + device_id
     device_summary_path = "/sdcard/camera_" + camera_id + "_its_summary.txt"
     if summary_path is not None:
         _run("%s push %s %s" % (
-                ItsSession.ADB, summary_path, device_summary_path))
+                adb, summary_path, device_summary_path))
         _run("%s shell am broadcast -a %s --es %s %s --es %s %s --es %s %s" % (
-                ItsSession.ADB, ItsSession.ACTION_ITS_RESULT,
+                adb, ItsSession.ACTION_ITS_RESULT,
                 ItsSession.EXTRA_CAMERA_ID, camera_id,
                 ItsSession.EXTRA_SUCCESS, 'True' if success else 'False',
                 ItsSession.EXTRA_SUMMARY, device_summary_path))
     else:
         _run("%s shell am broadcast -a %s --es %s %s --es %s %s --es %s %s" % (
-                ItsSession.ADB, ItsSession.ACTION_ITS_RESULT,
+                adb, ItsSession.ACTION_ITS_RESULT,
                 ItsSession.EXTRA_CAMERA_ID, camera_id,
                 ItsSession.EXTRA_SUCCESS, 'True' if success else 'False',
                 ItsSession.EXTRA_SUMMARY, "null"))
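Sketch of how the new device-selection flow is meant to be used from a test
script (illustrative only; camera ID "0" and the pass result are placeholders):

    import its.device

    device_id = its.device.get_device_id()  # honors a "device=<id>" argv entry
    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
    # Report a pass for camera "0"; summary_path remains optional.
    its.device.report_result(device_id, "0", True)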
diff --git a/apps/CameraITS/pymodules/its/image.py b/apps/CameraITS/pymodules/its/image.py
index b3bdb65..ea01a3e 100644
--- a/apps/CameraITS/pymodules/its/image.py
+++ b/apps/CameraITS/pymodules/its/image.py
@@ -64,11 +64,14 @@
     if cap["format"] == "raw10":
         assert(props is not None)
         cap = unpack_raw10_capture(cap, props)
+    if cap["format"] == "raw12":
+        assert(props is not None)
+        cap = unpack_raw12_capture(cap, props)
     if cap["format"] == "yuv":
         y = cap["data"][0:w*h]
         u = cap["data"][w*h:w*h*5/4]
         v = cap["data"][w*h*5/4:w*h*6/4]
-        return convert_yuv420_to_rgb_image(y, u, v, w, h)
+        return convert_yuv420_planar_to_rgb_image(y, u, v, w, h)
     elif cap["format"] == "jpeg":
         return decompress_jpeg_to_rgb_image(cap["data"])
     elif cap["format"] == "raw":
@@ -114,12 +117,12 @@
         raise its.error.Error('Invalid raw-10 buffer width')
     w = img.shape[1]*4/5
     h = img.shape[0]
-    # Cut out the 4x8b MSBs and shift to bits [10:2] in 16b words.
+    # Cut out the 4x8b MSBs and shift to bits [9:2] in 16b words.
     msbs = numpy.delete(img, numpy.s_[4::5], 1)
     msbs = msbs.astype(numpy.uint16)
     msbs = numpy.left_shift(msbs, 2)
     msbs = msbs.reshape(h,w)
-    # Cut out the 4x2b LSBs and put each in bits [2:0] of their own 8b words.
+    # Cut out the 4x2b LSBs and put each in bits [1:0] of their own 8b words.
     lsbs = img[::, 4::5].reshape(h,w/4)
     lsbs = numpy.right_shift(
             numpy.packbits(numpy.unpackbits(lsbs).reshape(h,w/4,4,2),3), 6)
@@ -128,6 +131,56 @@
     img16 = numpy.bitwise_or(msbs, lsbs).reshape(h,w)
     return img16
 
+def unpack_raw12_capture(cap, props):
+    """Unpack a raw-12 capture to a raw-16 capture.
+
+    Args:
+        cap: A raw-12 capture object.
+        props: Camera properties object.
+
+    Returns:
+        New capture object with raw-16 data.
+    """
+    # Data is packed as 2x12b pixels in 3 bytes, with the first 2 bytes holding
+    # the MSBs of the pixels, and the 3rd byte holding 2x4b LSBs.
+    w,h = cap["width"], cap["height"]
+    if w % 2 != 0:
+        raise its.error.Error('Invalid raw-12 buffer width')
+    cap = copy.deepcopy(cap)
+    cap["data"] = unpack_raw12_image(cap["data"].reshape(h,w*3/2))
+    cap["format"] = "raw"
+    return cap
+
+def unpack_raw12_image(img):
+    """Unpack a raw-12 image to a raw-16 image.
+
+    Output image will have the 12 LSBs filled in each 16b word, and the 4 MSBs
+    will be set to zero.
+
+    Args:
+        img: A raw-12 image, as a uint8 numpy array.
+
+    Returns:
+        Image as a uint16 numpy array, with all row padding stripped.
+    """
+    if img.shape[1] % 3 != 0:
+        raise its.error.Error('Invalid raw-12 buffer width')
+    w = img.shape[1]*2/3
+    h = img.shape[0]
+    # Cut out the 2x8b MSBs and shift to bits [11:4] in 16b words.
+    msbs = numpy.delete(img, numpy.s_[2::3], 1)
+    msbs = msbs.astype(numpy.uint16)
+    msbs = numpy.left_shift(msbs, 4)
+    msbs = msbs.reshape(h,w)
+    # Cut out the 2x4b LSBs and put each in bits [3:0] of their own 8b words.
+    lsbs = img[::, 2::3].reshape(h,w/2)
+    lsbs = numpy.right_shift(
+            numpy.packbits(numpy.unpackbits(lsbs).reshape(h,w/2,2,4),3), 4)
+    lsbs = lsbs.reshape(h,w)
+    # Fuse the MSBs and LSBs back together
+    img16 = numpy.bitwise_or(msbs, lsbs).reshape(h,w)
+    return img16
+
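Worked example for the unpacking above (a sketch, not part of the patch): two
RAW12 pixels occupy three bytes, and per unpack_raw12_image the first pixel
takes the high nibble of the shared LSB byte.

    import numpy as np
    import its.image

    packed = np.array([[0xAB, 0xCD, 0xEF]], dtype=np.uint8)  # one row, 2 pixels
    # MSBs: 0xAB<<4 = 0xAB0 and 0xCD<<4 = 0xCD0; LSB nibbles of 0xEF: 0xE, 0xF.
    print its.image.unpack_raw12_image(packed)  # [[2750 3295]] == [[0xABE 0xCDF]]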
 def convert_capture_to_planes(cap, props=None):
     """Convert a captured image object to separate image planes.
 
@@ -158,6 +211,9 @@
     if cap["format"] == "raw10":
         assert(props is not None)
         cap = unpack_raw10_capture(cap, props)
+    if cap["format"] == "raw12":
+        assert(props is not None)
+        cap = unpack_raw12_capture(cap, props)
     if cap["format"] == "yuv":
         y = cap["data"][0:w*h]
         u = cap["data"][w*h:w*h*5/4]
@@ -176,6 +232,36 @@
         img = numpy.ndarray(shape=(h*w,), dtype='<u2',
                             buffer=cap["data"][0:w*h*2])
         img = img.astype(numpy.float32).reshape(h,w) / white_level
+        # Crop the raw image to the active array region.
+        if props.has_key("android.sensor.info.activeArraySize") \
+                and props["android.sensor.info.activeArraySize"] is not None \
+                and props.has_key("android.sensor.info.pixelArraySize") \
+                and props["android.sensor.info.pixelArraySize"] is not None:
+            # Note that the Rect class is defined such that the left,top values
+            # are "inside" while the right,bottom values are "outside"; that is,
+            # it's inclusive of the top,left sides only. So, the width is
+            # computed as right-left, rather than right-left+1, etc.
+            wfull = props["android.sensor.info.pixelArraySize"]["width"]
+            hfull = props["android.sensor.info.pixelArraySize"]["height"]
+            xcrop = props["android.sensor.info.activeArraySize"]["left"]
+            ycrop = props["android.sensor.info.activeArraySize"]["top"]
+            wcrop = props["android.sensor.info.activeArraySize"]["right"]-xcrop
+            hcrop = props["android.sensor.info.activeArraySize"]["bottom"]-ycrop
+            assert(wfull >= wcrop >= 0)
+            assert(hfull >= hcrop >= 0)
+            assert(wfull - wcrop >= xcrop >= 0)
+            assert(hfull - hcrop >= ycrop >= 0)
+            if w == wfull and h == hfull:
+                # Crop needed; extract the center region.
+                img = img[ycrop:ycrop+hcrop,xcrop:xcrop+wcrop]
+                w = wcrop
+                h = hcrop
+            elif w == wcrop and h == hcrop:
+                # No crop needed; image is already cropped to the active array.
+                pass
+            else:
+                raise its.error.Error('Invalid image size metadata')
+        # Separate the image planes.
         imgs = [img[::2].reshape(w*h/2)[::2].reshape(h/2,w/2,1),
                 img[::2].reshape(w*h/2)[1::2].reshape(h/2,w/2,1),
                 img[1::2].reshape(w*h/2)[::2].reshape(h/2,w/2,1),
@@ -285,10 +371,10 @@
     img = numpy.dot(img.reshape(w*h,3), ccm.T).reshape(h,w,3).clip(0.0,1.0)
     return img
 
-def convert_yuv420_to_rgb_image(y_plane, u_plane, v_plane,
-                                w, h,
-                                ccm_yuv_to_rgb=DEFAULT_YUV_TO_RGB_CCM,
-                                yuv_off=DEFAULT_YUV_OFFSETS):
+def convert_yuv420_planar_to_rgb_image(y_plane, u_plane, v_plane,
+                                       w, h,
+                                       ccm_yuv_to_rgb=DEFAULT_YUV_TO_RGB_CCM,
+                                       yuv_off=DEFAULT_YUV_OFFSETS):
     """Convert a YUV420 8-bit planar image to an RGB image.
 
     Args:
@@ -316,16 +402,44 @@
     rgb.reshape(w*h*3)[:] = flt.reshape(w*h*3)[:]
     return rgb.astype(numpy.float32) / 255.0
 
+def load_rgb_image(fname):
+    """Load a standard image file (JPG, PNG, etc.).
+
+    Args:
+        fname: The path of the file to load.
+
+    Returns:
+        RGB float-3 image array, with pixel values in [0.0, 1.0].
+    """
+    img = Image.open(fname)
+    w = img.size[0]
+    h = img.size[1]
+    a = numpy.array(img)
+    if len(a.shape) == 3 and a.shape[2] == 3:
+        # RGB
+        return a.reshape(h,w,3) / 255.0
+    elif len(a.shape) == 2 or len(a.shape) == 3 and a.shape[2] == 1:
+        # Greyscale; convert to RGB
+        return a.reshape(h*w).repeat(3).reshape(h,w,3) / 255.0
+    else:
+        raise its.error.Error('Unsupported image type')
+
 def load_yuv420_to_rgb_image(yuv_fname,
                              w, h,
+                             layout="planar",
                              ccm_yuv_to_rgb=DEFAULT_YUV_TO_RGB_CCM,
                              yuv_off=DEFAULT_YUV_OFFSETS):
     """Load a YUV420 image file, and return as an RGB image.
 
+    Supported layouts include "planar" and "nv21". The "yuv" formatted captures
+    returned from the device via do_capture are in the "planar" layout; other
+    layouts may only be needed for loading files from other sources.
+
     Args:
         yuv_fname: The path of the YUV420 file.
         w: The width of the image.
         h: The height of the image.
+        layout: (Optional) the layout of the YUV data (as a string).
         ccm_yuv_to_rgb: (Optional) the 3x3 CCM to convert from YUV to RGB.
         yuv_off: (Optional) offsets to subtract from each of Y,U,V values.
 
@@ -333,13 +447,24 @@
         RGB float-3 image array, with pixel values in [0.0, 1.0].
     """
     with open(yuv_fname, "rb") as f:
-        y = numpy.fromfile(f, numpy.uint8, w*h, "")
-        v = numpy.fromfile(f, numpy.uint8, w*h/4, "")
-        u = numpy.fromfile(f, numpy.uint8, w*h/4, "")
-        return convert_yuv420_to_rgb_image(y,u,v,w,h,ccm_yuv_to_rgb,yuv_off)
+        if layout == "planar":
+            # Plane of Y, plane of V, plane of U.
+            y = numpy.fromfile(f, numpy.uint8, w*h, "")
+            v = numpy.fromfile(f, numpy.uint8, w*h/4, "")
+            u = numpy.fromfile(f, numpy.uint8, w*h/4, "")
+        elif layout == "nv21":
+            # Plane of Y, plane of interleaved VUVUVU...
+            y = numpy.fromfile(f, numpy.uint8, w*h, "")
+            vu = numpy.fromfile(f, numpy.uint8, w*h/2, "")
+            v = vu[0::2]
+            u = vu[1::2]
+        else:
+            raise its.error.Error('Unsupported image layout')
+        return convert_yuv420_planar_to_rgb_image(
+                y,u,v,w,h,ccm_yuv_to_rgb,yuv_off)
 
-def load_yuv420_to_yuv_planes(yuv_fname, w, h):
-    """Load a YUV420 image file, and return separate Y, U, and V plane images.
+def load_yuv420_planar_to_yuv_planes(yuv_fname, w, h):
+    """Load a YUV420 planar image file, and return Y, U, and V plane images.
 
     Args:
         yuv_fname: The path of the YUV420 file.
@@ -540,169 +665,24 @@
     img = numpy.vstack(chs).T.reshape(h/f,w/f,chans)
     return img
 
-def __get_color_checker_patch(img, xc,yc, patch_size):
-    r = patch_size/2
-    tile = img[yc-r:yc+r:, xc-r:xc+r:, ::]
-    return tile
-
-def __measure_color_checker_patch(img, xc,yc, patch_size):
-    tile = __get_color_checker_patch(img, xc,yc, patch_size)
-    means = tile.mean(1).mean(0)
-    return means
-
-def get_color_checker_chart_patches(img, debug_fname_prefix=None):
-    """Return the center coords of each patch in a color checker chart.
-
-    Assumptions:
-    * Chart is vertical or horizontal w.r.t. camera, but not diagonal.
-    * Chart is (roughly) planar-parallel to the camera.
-    * Chart is centered in frame (roughly).
-    * Around/behind chart is white/grey background.
-    * The only black pixels in the image are from the chart.
-    * Chart is 100% visible and contained within image.
-    * No other objects within image.
-    * Image is well-exposed.
-    * Standard color checker chart with standard-sized black borders.
-
-    The values returned are in the coordinate system of the chart; that is,
-    patch (0,0) is the brown patch that is in the chart's top-left corner when
-    it is in the normal upright/horizontal orientation. (The chart may be any
-    of the four main orientations in the image.)
+def compute_image_sharpness(img):
+    """Calculate the sharpness of input image.
 
     Args:
-        img: Input image, as a numpy array with pixels in [0,1].
-        debug_fname_prefix: If not None, the (string) name of a file prefix to
-            use to save a number of debug images for visualizing the output of
-            this function; can be used to see if the patches are being found
-            successfully.
+        img: Numpy float RGB/luma image array, with pixel values in [0,1].
 
     Returns:
-        6x4 list of lists of integer (x,y) coords of the center of each patch,
-        ordered in the "chart order" (6x4 row major).
+        A sharpness estimate based on the average gradient magnitude; a larger
+        value means a sharper image.
     """
+    chans = img.shape[2]
+    assert(chans == 1 or chans == 3)
+    luma = img
+    if chans == 3:
+        luma = 0.299 * img[:,:,0] + 0.587 * img[:,:,1] + 0.114 * img[:,:,2]
 
-    # Shrink the original image.
-    DOWNSCALE_FACTOR = 4
-    img_small = downscale_image(img, DOWNSCALE_FACTOR)
-
-    # Make a threshold image, which is 1.0 where the image is black,
-    # and 0.0 elsewhere.
-    BLACK_PIXEL_THRESH = 0.2
-    mask_img = scipy.stats.threshold(
-                img_small.max(2), BLACK_PIXEL_THRESH, 1.1, 0.0)
-    mask_img = 1.0 - scipy.stats.threshold(mask_img, -0.1, 0.1, 1.0)
-
-    if debug_fname_prefix is not None:
-        h,w = mask_img.shape
-        write_image(img, debug_fname_prefix+"_0.jpg")
-        write_image(mask_img.repeat(3).reshape(h,w,3),
-                debug_fname_prefix+"_1.jpg")
-
-    # Mask image flattened to a single row or column (by averaging).
-    # Also apply a threshold to these arrays.
-    FLAT_PIXEL_THRESH = 0.05
-    flat_row = mask_img.mean(0)
-    flat_col = mask_img.mean(1)
-    flat_row = [0 if v < FLAT_PIXEL_THRESH else 1 for v in flat_row]
-    flat_col = [0 if v < FLAT_PIXEL_THRESH else 1 for v in flat_col]
-
-    # Start and end of the non-zero region of the flattened row/column.
-    flat_row_nonzero = [i for i in range(len(flat_row)) if flat_row[i]>0]
-    flat_col_nonzero = [i for i in range(len(flat_col)) if flat_col[i]>0]
-    flat_row_min, flat_row_max = min(flat_row_nonzero), max(flat_row_nonzero)
-    flat_col_min, flat_col_max = min(flat_col_nonzero), max(flat_col_nonzero)
-
-    # Orientation of chart, and number of grid cells horz. and vertically.
-    orient = "h" if flat_row_max-flat_row_min>flat_col_max-flat_col_min else "v"
-    xgrids = 6 if orient=="h" else 4
-    ygrids = 6 if orient=="v" else 4
-
-    # Get better bounds on the patches region, lopping off some of the excess
-    # black border.
-    HRZ_BORDER_PAD_FRAC = 0.0138
-    VERT_BORDER_PAD_FRAC = 0.0395
-    xpad = HRZ_BORDER_PAD_FRAC if orient=="h" else VERT_BORDER_PAD_FRAC
-    ypad = HRZ_BORDER_PAD_FRAC if orient=="v" else VERT_BORDER_PAD_FRAC
-    xchart = flat_row_min + (flat_row_max - flat_row_min) * xpad
-    ychart = flat_col_min + (flat_col_max - flat_col_min) * ypad
-    wchart = (flat_row_max - flat_row_min) * (1 - 2*xpad)
-    hchart = (flat_col_max - flat_col_min) * (1 - 2*ypad)
-
-    # Get the colors of the 4 corner patches, in clockwise order, by measuring
-    # the average value of a small patch at each of the 4 patch centers.
-    colors = []
-    centers = []
-    for (x,y) in [(0,0), (xgrids-1,0), (xgrids-1,ygrids-1), (0,ygrids-1)]:
-        xc = xchart + (x + 0.5)*wchart/xgrids
-        yc = ychart + (y + 0.5)*hchart/ygrids
-        xc = int(xc * DOWNSCALE_FACTOR + 0.5)
-        yc = int(yc * DOWNSCALE_FACTOR + 0.5)
-        centers.append((xc,yc))
-        chan_means = __measure_color_checker_patch(img, xc,yc, 32)
-        colors.append(sum(chan_means) / len(chan_means))
-
-    # The brightest corner is the white patch, the darkest is the black patch.
-    # The black patch should be counter-clockwise from the white patch.
-    white_patch_index = None
-    for i in range(4):
-        if colors[i] == max(colors) and \
-                colors[(i-1+4)%4] == min(colors):
-            white_patch_index = i%4
-    assert(white_patch_index is not None)
-
-    # Return the coords of the origin (top-left when the chart is in the normal
-    # upright orientation) patch's center, and the vector displacement to the
-    # center of the second patch on the first row of the chart (when in the
-    # normal upright orientation).
-    origin_index = (white_patch_index+1)%4
-    prev_index = (origin_index-1+4)%4
-    next_index = (origin_index+1)%4
-    origin_center = centers[origin_index]
-    prev_center = centers[prev_index]
-    next_center = centers[next_index]
-    vec_across = tuple([(next_center[i]-origin_center[i])/5.0 for i in [0,1]])
-    vec_down = tuple([(prev_center[i]-origin_center[i])/3.0 for i in [0,1]])
-
-    # Compute the center of each patch.
-    patches = [[],[],[],[]]
-    for yi in range(4):
-        for xi in range(6):
-            x0,y0 = origin_center
-            dxh,dyh = vec_across
-            dxv,dyv = vec_down
-            xc = int(x0 + dxh*xi + dxv*yi)
-            yc = int(y0 + dyh*xi + dyv*yi)
-            patches[yi].append((xc,yc))
-
-    # Sanity check: test that the R,G,B,black,white patches are correct.
-    sanity_failed = False
-    patch_info = [(2,2,[0]), # Red
-                  (2,1,[1]), # Green
-                  (2,0,[2]), # Blue
-                  (3,0,[0,1,2]), # White
-                  (3,5,[])] # Black
-    for i in range(len(patch_info)):
-        yi,xi,high_chans = patch_info[i]
-        low_chans = [i for i in [0,1,2] if i not in high_chans]
-        xc,yc = patches[yi][xi]
-        means = __measure_color_checker_patch(img, xc,yc, 64)
-        if (min([means[i] for i in high_chans]+[1]) < \
-                max([means[i] for i in low_chans]+[0])):
-            sanity_failed = True
-
-    if debug_fname_prefix is not None:
-        gridimg = numpy.zeros([4*(32+2), 6*(32+2), 3])
-        for yi in range(4):
-            for xi in range(6):
-                xc,yc = patches[yi][xi]
-                tile = __get_color_checker_patch(img, xc,yc, 32)
-                gridimg[yi*(32+2)+1:yi*(32+2)+1+32,
-                        xi*(32+2)+1:xi*(32+2)+1+32, :] = tile
-        write_image(gridimg, debug_fname_prefix+"_2.png")
-
-    assert(not sanity_failed)
-
-    return patches
+    [gy, gx] = numpy.gradient(luma)
+    return numpy.average(numpy.sqrt(gy*gy + gx*gx))
 
 class __UnitTest(unittest.TestCase):
     """Run a suite of unit tests on this module.
diff --git a/apps/CameraITS/pymodules/its/objects.py b/apps/CameraITS/pymodules/its/objects.py
index e1541a1..82346ec 100644
--- a/apps/CameraITS/pymodules/its/objects.py
+++ b/apps/CameraITS/pymodules/its/objects.py
@@ -70,7 +70,8 @@
     else:
         return float(r["numerator"]) / float(r["denominator"])
 
-def manual_capture_request(sensitivity, exp_time, linear_tonemap=False):
+def manual_capture_request(
+        sensitivity, exp_time, linear_tonemap=False, props=None):
     """Return a capture request with everything set to manual.
 
     Uses identity/unit color correction, and the default tonemap curve.
@@ -82,6 +83,9 @@
             with.
         linear_tonemap: [Optional] whether a linear tonemap should be used
             in this request.
+        props: [Optional] the object returned from
+            its.device.get_camera_properties(). Must be present when
+            linear_tonemap is True.
 
     Returns:
         The default manual capture request, ready to be passed to the
@@ -105,10 +109,20 @@
         "android.shading.mode": 1
         }
     if linear_tonemap:
-        req["android.tonemap.mode"] = 0
-        req["android.tonemap.curveRed"] = [0.0,0.0, 1.0,1.0]
-        req["android.tonemap.curveGreen"] = [0.0,0.0, 1.0,1.0]
-        req["android.tonemap.curveBlue"] = [0.0,0.0, 1.0,1.0]
+        assert(props is not None)
+        #CONTRAST_CURVE mode
+        if 0 in props["android.tonemap.availableToneMapModes"]:
+            req["android.tonemap.mode"] = 0
+            req["android.tonemap.curveRed"] = [0.0,0.0, 1.0,1.0]
+            req["android.tonemap.curveGreen"] = [0.0,0.0, 1.0,1.0]
+            req["android.tonemap.curveBlue"] = [0.0,0.0, 1.0,1.0]
+        #GAMMA_VALUE mode
+        elif 3 in props["android.tonemap.availableToneMapModes"]:
+            req["android.tonemap.mode"] = 3
+            req["android.tonemap.gamma"] = 1.0
+        else:
+            print "Linear tonemap is not supported"
+            assert(False)
     return req
 
 def auto_capture_request():
@@ -142,13 +156,15 @@
     """Return a sorted list of available output sizes for a given format.
 
     Args:
-        fmt: the output format, as a string in ["jpg", "yuv", "raw"].
+        fmt: the output format, as a string in
+            ["jpg", "yuv", "raw", "raw10", "raw12"].
         props: the object returned from its.device.get_camera_properties().
 
     Returns:
         A sorted list of (w,h) tuples (sorted large-to-small).
     """
-    fmt_codes = {"raw":0x20, "raw10":0x25, "yuv":0x23, "jpg":0x100, "jpeg":0x100}
+    fmt_codes = {"raw":0x20, "raw10":0x25, "raw12":0x26, "yuv":0x23,
+                 "jpg":0x100, "jpeg":0x100}
     configs = props['android.scaler.streamConfigurationMap']\
                    ['availableStreamConfigurations']
     fmt_configs = [cfg for cfg in configs if cfg['format'] == fmt_codes[fmt]]
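Illustrative request using the new props argument (a sketch, not part of the
patch; sensitivity and exposure values are arbitrary):

    import its.device
    import its.objects

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        # props is now required when linear_tonemap=True, so the request can
        # fall back from CONTRAST_CURVE (mode 0) to GAMMA_VALUE (mode 3).
        req = its.objects.manual_capture_request(
                100, 10*1000*1000, linear_tonemap=True, props=props)
        cap = cam.do_capture(req)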
diff --git a/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf b/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf
index 01389fa..d979a06 100644
--- a/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf
+++ b/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf
Binary files differ
diff --git a/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py b/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
index 19b6c92..8f4682a 100644
--- a/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
+++ b/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
@@ -13,144 +13,249 @@
 # limitations under the License.
 
 import its.device
+import its.caps
 import its.objects
 import its.image
-import pprint
-import pylab
 import os.path
+import pylab
 import matplotlib
-import matplotlib.pyplot
-import numpy
+import matplotlib.pyplot as plt
 import math
+import Image
+import time
+import numpy as np
+import scipy.stats
+import scipy.signal
+
+# Convert a 2D array a to a 4D array with dimensions [tile_size,
+# tile_size, row, col] where row, col are tile indices.
+def tile(a, tile_size):
+    tile_rows, tile_cols = a.shape[0]/tile_size, a.shape[1]/tile_size
+    a = a.reshape([tile_rows, tile_size, tile_cols, tile_size])
+    a = a.transpose([1, 3, 0, 2])
+    return a
 
 def main():
-    """Compute the DNG noise model from a color checker chart.
-
-    TODO: Make this more robust; some manual futzing may be needed.
+    """Capture a set of raw images with increasing gains and measure the noise.
     """
     NAME = os.path.basename(__file__).split(".")[0]
 
-    with its.device.ItsSession() as cam:
+    # How many sensitivities per stop to sample.
+    steps_per_stop = 2
+    # How large of tiles to use to compute mean/variance.
+    tile_size = 64
+    # Exposure bracketing range in stops
+    bracket_stops = 4
+    # How high to allow the mean of the tiles to go.
+    max_signal_level = 0.5
+    # Colors used for plotting the data for each exposure.
+    colors = 'rygcbm'
 
+    # Define a first order high pass filter to eliminate low frequency
+    # signal content when computing variance.
+    f = np.array([-1, 1]).astype('float32')
+    # Make it a higher order filter by convolving the first order
+    # filter with itself a few times.
+    f = np.convolve(f, f)
+    f = np.convolve(f, f)
+
+    # Compute the normalization of the filter to preserve noise
+    # power. Let a be the normalization factor we're looking for, and
+    # Let X and X' be the random variables representing the noise
+    # before and after filtering, respectively. First, compute
+    # Var[a*X']:
+    #
+    #   Var[a*X'] = a^2*Var[X*f_0 + X*f_1 + ... + X*f_N-1]
+    #             = a^2*(f_0^2*Var[X] + f_1^2*Var[X] + ... + (f_N-1)^2*Var[X])
+    #             = sum(f_i^2)*a^2*Var[X]
+    #
+    # We want Var[a*X'] to be equal to Var[X]:
+    #
+    #    sum(f_i^2)*a^2*Var[X] = Var[X] -> a = sqrt(1/sum(f_i^2))
+    #
+    # We can just bake this normalization factor into the high pass
+    # filter kernel.
+    f = f/math.sqrt(np.dot(f, f))
+
+    bracket_factor = math.pow(2, bracket_stops)
+
+    with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
 
-        white_level = float(props['android.sensor.info.whiteLevel'])
+        # Get basic properties we need.
+        sens_min, sens_max = props['android.sensor.info.sensitivityRange']
+        sens_max_analog = props['android.sensor.maxAnalogSensitivity']
+        white_level = props['android.sensor.info.whiteLevel']
         black_levels = props['android.sensor.blackLevelPattern']
         idxs = its.image.get_canonical_cfa_order(props)
         black_levels = [black_levels[i] for i in idxs]
 
-        # Expose for the scene with min sensitivity
-        sens_min, sens_max = props['android.sensor.info.sensitivityRange']
-        s_ae,e_ae,awb_gains,awb_ccm,_  = cam.do_3a(get_results=True)
-        s_e_prod = s_ae * e_ae
+        print "Sensitivity range: [%f, %f]" % (sens_min, sens_max)
+        print "Max analog sensitivity: %f" % (sens_max_analog)
 
-        # Make the image brighter since the script looks at linear Bayer
-        # raw patches rather than gamma-encoded YUV patches (and the AE
-        # probably under-exposes a little for this use-case).
-        s_e_prod *= 2
+        # Do AE to get a rough idea of where we are.
+        s_ae,e_ae,_,_,_  = \
+            cam.do_3a(get_results=True, do_awb=False, do_af=False)
+        # Underexpose to get more data for low signal levels.
+        auto_e = s_ae*e_ae/bracket_factor
 
-        # Capture raw frames across the full sensitivity range.
-        NUM_SENS_STEPS = 9
-        sens_step = int((sens_max - sens_min - 1) / float(NUM_SENS_STEPS))
-        reqs = []
-        sens = []
-        for s in range(sens_min, sens_max, sens_step):
-            e = int(s_e_prod / float(s))
-            req = its.objects.manual_capture_request(s, e)
-            req["android.colorCorrection.transform"] = \
-                    its.objects.float_to_rational(awb_ccm)
-            req["android.colorCorrection.gains"] = awb_gains
-            reqs.append(req)
-            sens.append(s)
+        # If the auto-exposure result is too bright for the highest
+        # sensitivity or too dark for the lowest sensitivity, report
+        # an error.
+        min_exposure_ns, max_exposure_ns = \
+            props['android.sensor.info.exposureTimeRange']
+        if auto_e < min_exposure_ns*sens_max:
+            raise its.error.Error("Scene is too bright to properly expose \
+                                  at the highest sensitivity")
+        if auto_e*bracket_factor > max_exposure_ns*sens_min:
+            raise its.error.Error("Scene is too dark to properly expose \
+                                  at the lowest sensitivity")
 
-        caps = cam.do_capture(reqs, cam.CAP_RAW)
+        # Start the sensitivities at the minimum.
+        s = sens_min
 
-        # A list of the (x,y) coords of the center pixel of a collection of
-        # patches of a color checker chart. Each patch should be uniform,
-        # however the actual color doesn't matter. Note that the coords are
-        # relative to the *converted* RGB image, which is 1/2 x 1/2 of the
-        # full size; convert back to full.
-        img = its.image.convert_capture_to_rgb_image(caps[0], props=props)
-        patches = its.image.get_color_checker_chart_patches(img, NAME+"_debug")
-        patches = [(2*x,2*y) for (x,y) in sum(patches,[])]
+        samples = []
+        plots = []
+        measured_models = []
+        while s <= sens_max + 1:
+            print "ISO %d" % round(s)
+            fig = plt.figure()
+            plt_s = fig.gca()
+            plt_s.set_title("ISO %d" % round(s))
+            plt_s.set_xlabel("Mean signal level")
+            plt_s.set_ylabel("Variance")
 
-        lines = []
-        for iouter, (s,cap) in enumerate(zip(sens,caps)):
-            # For each capture, compute the mean value in each patch, for each
-            # Bayer plane; discard patches where pixels are close to clamped.
-            # Also compute the variance.
-            CLAMP_THRESH = 0.2
-            planes = its.image.convert_capture_to_planes(cap, props)
-            points = []
-            for i,plane in enumerate(planes):
-                plane = (plane * white_level - black_levels[i]) / (
-                        white_level - black_levels[i])
-                for j,(x,y) in enumerate(patches):
-                    tile = plane[y/2-16:y/2+16:,x/2-16:x/2+16:,::]
-                    mean = its.image.compute_image_means(tile)[0]
-                    var = its.image.compute_image_variances(tile)[0]
-                    if (mean > CLAMP_THRESH and mean < 1.0-CLAMP_THRESH):
-                        # Each point is a (mean,variance) tuple for a patch;
-                        # for a given ISO, there should be a linear
-                        # relationship between these values.
-                        points.append((mean,var))
+            samples_s = []
+            for b in range(0, bracket_stops + 1):
+                # Get the exposure for this sensitivity and exposure time.
+                e = int(math.pow(2, b)*auto_e/float(s))
+                req = its.objects.manual_capture_request(round(s), e)
+                cap = cam.do_capture(req, cam.CAP_RAW)
+                planes = its.image.convert_capture_to_planes(cap, props)
 
-            # Fit a line to the points, with a line equation: y = mx + b.
-            # This line is the relationship between mean and variance (i.e.)
-            # between signal level and noise, for this particular sensor.
-            # In the DNG noise model, the gradient (m) is "S", and the offset
-            # (b) is "O".
-            points.sort()
-            xs = [x for (x,y) in points]
-            ys = [y for (x,y) in points]
-            m,b = numpy.polyfit(xs, ys, 1)
-            lines.append((s,m,b))
-            print s, "->", m, b
+                samples_e = []
+                for (pidx, p) in enumerate(planes):
+                    p = p.squeeze()
 
-            # TODO: Clean up these checks (which currently fail in some cases).
-            # Some sanity checks:
-            # * Noise levels should increase with brightness.
-            # * Extrapolating to a black image, the noise should be positive.
-            # Basically, the "b" value should correspond to the read noise,
-            # which is the noise level if the sensor was operating in zero
-            # light.
-            #assert(m > 0)
-            #assert(b >= 0)
+                    # Crop the plane to be a multiple of the tile size.
+                    p = p[0:p.shape[0] - p.shape[0]%tile_size, 
+                          0:p.shape[1] - p.shape[1]%tile_size]
 
-            if iouter == 0:
-                pylab.plot(xs, ys, 'r', label="Measured")
-                pylab.plot([0,xs[-1]],[b,m*xs[-1]+b],'b', label="Fit")
-            else:
-                pylab.plot(xs, ys, 'r')
-                pylab.plot([0,xs[-1]],[b,m*xs[-1]+b],'b')
+                    # convert_capture_to_planes normalizes the range
+                    # to [0, 1], but without subtracting the black
+                    # level.
+                    black_level = black_levels[pidx]
+                    p = p*white_level
+                    p = (p - black_level)/(white_level - black_level)
 
-        pylab.xlabel("Mean")
-        pylab.ylabel("Variance")
-        pylab.legend()
-        matplotlib.pyplot.savefig("%s_plot_mean_vs_variance.png" % (NAME))
+                    # Use our high pass filter to filter this plane.
+                    hp = scipy.signal.sepfir2d(p, f, f).astype('float32')
 
-        # Now fit a line across the (m,b) line parameters for each sensitivity.
-        # The gradient (m) params are fit to the "S" line, and the offset (b)
-        # params are fit to the "O" line, both as a function of sensitivity.
-        gains = [d[0] for d in lines]
-        Ss = [d[1] for d in lines]
-        Os = [d[2] for d in lines]
-        mS,bS = numpy.polyfit(gains, Ss, 1)
-        mO,bO = numpy.polyfit(gains, Os, 1)
+                    means_tiled = \
+                        np.mean(tile(p, tile_size), axis=(0, 1)).flatten()
+                    vars_tiled = \
+                        np.var(tile(hp, tile_size), axis=(0, 1)).flatten()
 
-        # Plot curve "O" as 10x, so it fits in the same scale as curve "S".
-        fig = matplotlib.pyplot.figure()
-        pylab.plot(gains, [10*o for o in Os], 'r', label="Measured")
-        pylab.plot([gains[0],gains[-1]],
-                [10*mO*gains[0]+10*bO, 10*mO*gains[-1]+10*bO],'r--',label="Fit")
-        pylab.plot(gains, Ss, 'b', label="Measured")
-        pylab.plot([gains[0],gains[-1]], [mS*gains[0]+bS,mS*gains[-1]+bS],'b--',
-                label="Fit")
-        pylab.xlabel("Sensitivity")
-        pylab.ylabel("Model parameter: S (blue), O x10 (red)")
-        pylab.legend()
-        matplotlib.pyplot.savefig("%s_plot_S_O.png" % (NAME))
+                    for (mean, var) in zip(means_tiled, vars_tiled):
+                        # Don't include the tile if it has samples that might 
+                        # be clipped.
+                        if mean + 2*math.sqrt(var) < max_signal_level:
+                            samples_e.append([mean, var])
 
+                    means_e, vars_e = zip(*samples_e)
+                    plt_s.plot(means_e, vars_e, colors[b%len(colors)] + ',')
+
+                    samples_s.extend(samples_e)
+
+            [S, O, R, p, stderr] = scipy.stats.linregress(samples_s)
+            measured_models.append([round(s), S, O])
+            print "Sensitivity %d: %e*y + %e (R=%f)" % (round(s), S, O, R)
+
+            # Add the samples for this sensitivity to the global samples list.
+            samples.extend([(round(s), mean, var) for (mean, var) in samples_s])
+
+            # Add the linear fit to the plot for this sensitivity.
+            plt_s.plot([0, max_signal_level], [O, O + S*max_signal_level], 'r-', 
+                       label="Linear fit")
+            xmax = max([x for (x, _) in samples_s])*1.25
+            plt_s.set_xlim(xmin=0, xmax=xmax)
+            plt_s.set_ylim(ymin=0, ymax=(O + S*xmax)*1.25)
+            fig.savefig("%s_samples_iso%04d.png" % (NAME, round(s)))
+            plots.append([round(s), fig])
+
+            # Move to the next sensitivity.
+            s = s*math.pow(2, 1.0/steps_per_stop)
+
+        # Collect the sensitivities and measured line parameters per step.
+        S_measured = [e[1] for e in measured_models]
+        O_measured = [e[2] for e in measured_models]
+        sens = np.asarray([e[0] for e in measured_models])
+        sens_sq = np.square(sens)
+
+        # Use a global linear optimization to fit the noise model.
+        gains = np.asarray([s[0] for s in samples])
+        means = np.asarray([s[1] for s in samples])
+        vars_ = np.asarray([s[2] for s in samples])
+
+        # Define digital gain as the gain above the max analog gain
+        # per the Camera2 spec. Also, define a corresponding C
+        # expression snippet to use in the generated model code.
+        digital_gains = np.maximum(gains/sens_max_analog, 1)
+        digital_gain_cdef = "(sens / %d.0) < 1.0 ? 1.0 : (sens / %d.0)" % \
+            (sens_max_analog, sens_max_analog)
+
+        # Find the noise model parameters via least squares fit.
+        ad = gains*means
+        bd = means
+        cd = gains*gains
+        dd = digital_gains*digital_gains
+        a = np.asarray([ad, bd, cd, dd]).T
+        b = vars_
+
+        # To avoid overfitting to high ISOs (high variances), divide the system
+        # by the gains.
+        a = a/(np.tile(gains, (a.shape[1], 1)).T)
+        b = b/gains
+
+        [A, B, C, D], _, _, _ = np.linalg.lstsq(a, b)
+
+        # Plot the noise model components with the values predicted by the 
+        # noise model.
+        S_model = A*sens + B
+        O_model = \
+            C*sens_sq + D*np.square(np.maximum(sens/sens_max_analog, 1))
+
+        (fig, (plt_S, plt_O)) = plt.subplots(2, 1)
+        plt_S.set_title("Noise model")
+        plt_S.set_ylabel("S")
+        plt_S.loglog(sens, S_measured, 'r+', basex=10, basey=10, 
+                     label="Measured")
+        plt_S.loglog(sens, S_model, 'bx', basex=10, basey=10, label="Model")
+        plt_S.legend(loc=2)
+
+        plt_O.set_xlabel("ISO")
+        plt_O.set_ylabel("O")
+        plt_O.loglog(sens, O_measured, 'r+', basex=10, basey=10, 
+                     label="Measured")
+        plt_O.loglog(sens, O_model, 'bx', basex=10, basey=10, label="Model")
+        fig.savefig("%s.png" % (NAME))
+
+        for [s, fig] in plots:
+            plt_s = fig.gca()
+
+            dg = max(s/sens_max_analog, 1)
+            S = A*s + B
+            O = C*s*s + D*dg*dg
+            plt_s.plot([0, max_signal_level], [O, O + S*max_signal_level], 'b-', 
+                       label="Model")
+            plt_s.legend(loc=2)
+
+            plt.figure(fig.number)
+
+            # Re-save the plot with the global model.
+            fig.savefig("%s_samples_iso%04d.png" % (NAME, round(s)))
+
+        # Generate the noise model implementation.
         print """
         /* Generated test code to dump a table of data for external validation
          * of the noise model parameters.
@@ -176,11 +281,13 @@
             double s = %e * sens + %e;
             return s < 0.0 ? 0.0 : s;
         }
+
         double compute_noise_model_entry_O(int sens) {
-            double o = %e * sens + %e;
+            double digital_gain = %s;
+            double o = %e * sens * sens + %e * digital_gain * digital_gain;
             return o < 0.0 ? 0.0 : o;
         }
-        """%(sens_min,sens_max,mS,bS,mO,bO)
+        """ % (sens_min, sens_max, A, B, digital_gain_cdef, C, D)
 
 if __name__ == '__main__':
     main()
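
For reference, the global fit above solves, in a least-squares sense, var ~= A*gain*mean + B*mean + C*gain^2 + D*dgain^2, and the per-sensitivity line parameters then follow as S = A*sens + B and O = C*sens^2 + D*dgain(sens)^2, which is what the generated C snippet encodes. The sketch below reproduces that setup on an externally collected (gain, mean, var) sample list; the helper names noise_model_fit and model_S_O are illustrative and not part of the script.

    import numpy as np

    def noise_model_fit(samples, sens_max_analog):
        # samples: iterable of (gain, mean, var) tuples, one per accepted tile.
        gains = np.asarray([s[0] for s in samples], dtype=np.float64)
        means = np.asarray([s[1] for s in samples], dtype=np.float64)
        vars_ = np.asarray([s[2] for s in samples], dtype=np.float64)
        dgains = np.maximum(gains / sens_max_analog, 1.0)

        # Design matrix columns follow the model
        #   var = A*gain*mean + B*mean + C*gain^2 + D*dgain^2
        a = np.asarray([gains * means, means, gains * gains, dgains * dgains]).T
        b = vars_

        # Scale each equation by 1/gain so high-ISO (high-variance) rows do
        # not dominate the solution.
        a = a / gains[:, np.newaxis]
        b = b / gains

        A, B, C, D = np.linalg.lstsq(a, b)[0]
        return A, B, C, D

    def model_S_O(sens, params, sens_max_analog):
        # Reconstruct the per-sensitivity line parameters used in the plots.
        A, B, C, D = params
        dgain = max(sens / float(sens_max_analog), 1.0)
        return A * sens + B, C * sens * sens + D * dgain * dgain

With those four numbers, compute_noise_model_entry_S reduces to A*sens + B and compute_noise_model_entry_O to C*sens*sens + D*digital_gain*digital_gain, matching the generated code above.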
diff --git a/apps/CameraITS/tests/inprog/test_burst_sameness_auto.py b/apps/CameraITS/tests/inprog/test_burst_sameness_auto.py
index 87500c7..6629df8 100644
--- a/apps/CameraITS/tests/inprog/test_burst_sameness_auto.py
+++ b/apps/CameraITS/tests/inprog/test_burst_sameness_auto.py
@@ -39,7 +39,8 @@
 
         # Capture at the smallest resolution.
         props = cam.get_camera_properties()
-        its.caps.skip_unless(its.caps.manual_sensor(props))
+        its.caps.skip_unless(its.caps.manual_sensor(props) and
+                             its.caps.awb_lock(props))
 
         _, fmt = its.objects.get_fastest_manual_capture_settings(props)
         w,h = fmt["width"], fmt["height"]
diff --git a/apps/CameraITS/tests/inprog/test_burst_sameness_fullres_auto.py b/apps/CameraITS/tests/inprog/test_burst_sameness_fullres_auto.py
index 932c051..fa37174 100644
--- a/apps/CameraITS/tests/inprog/test_burst_sameness_fullres_auto.py
+++ b/apps/CameraITS/tests/inprog/test_burst_sameness_fullres_auto.py
@@ -15,6 +15,7 @@
 import its.image
 import its.device
 import its.objects
+import its.caps
 import os.path
 import numpy
 import pylab
@@ -41,6 +42,8 @@
 
         # Capture at full resolution.
         props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.manual_sensor(props) and
+                             its.caps.awb_lock(props))
         w,h = its.objects.get_available_output_sizes("yuv", props)[0]
 
         # Converge 3A prior to capture.
diff --git a/apps/CameraITS/tests/inprog/test_faces.py b/apps/CameraITS/tests/inprog/test_faces.py
deleted file mode 100644
index 228dac8..0000000
--- a/apps/CameraITS/tests/inprog/test_faces.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2014 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import its.image
-import its.device
-import its.objects
-import os.path
-
-def main():
-    """Test face detection.
-    """
-    NAME = os.path.basename(__file__).split(".")[0]
-
-    with its.device.ItsSession() as cam:
-        cam.do_3a()
-        req = its.objects.auto_capture_request()
-        req['android.statistics.faceDetectMode'] = 2
-        caps = cam.do_capture([req]*5)
-        for i,cap in enumerate(caps):
-            md = cap['metadata']
-            print "Frame %d face metadata:" % i
-            print "  Ids:", md['android.statistics.faceIds']
-            print "  Landmarks:", md['android.statistics.faceLandmarks']
-            print "  Rectangles:", md['android.statistics.faceRectangles']
-            print "  Scores:", md['android.statistics.faceScores']
-            print ""
-
-if __name__ == '__main__':
-    main()
-
diff --git a/apps/CameraITS/tests/scene1/test_crop_region_raw.py b/apps/CameraITS/tests/scene1/test_crop_region_raw.py
index 189e987..7973755 100644
--- a/apps/CameraITS/tests/scene1/test_crop_region_raw.py
+++ b/apps/CameraITS/tests/scene1/test_crop_region_raw.py
@@ -64,7 +64,7 @@
         # Use a manual request with a linear tonemap so that the YUV and RAW
         # should look the same (once converted by the its.image module).
         e, s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
-        req = its.objects.manual_capture_request(s,e, True)
+        req = its.objects.manual_capture_request(s,e, True, props)
         cap1_raw, cap1_yuv = cam.do_capture(req, cam.CAP_RAW_YUV)
 
         # Capture with a crop region.
diff --git a/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py b/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
index 9b43a74..5fd8f73 100644
--- a/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
+++ b/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
@@ -41,10 +41,12 @@
         range_max = ev_compensation_range[1]
         ev_per_step = its.objects.rational_to_float(
                 props['android.control.aeCompensationStep'])
-        steps_per_ev = int(1.0 / ev_per_step)
-        evs = range(range_min, range_max + 1, steps_per_ev)
+        steps_per_ev = int(round(1.0 / ev_per_step))
+        ev_steps = range(range_min, range_max + 1, steps_per_ev)
+        imid = len(ev_steps) / 2
+        ev_shifts = [pow(2, step * ev_per_step) for step in ev_steps]
         lumas = []
-        for ev in evs:
+        for ev in ev_steps:
             # Re-converge 3A, and lock AE once converged. skip AF trigger as
             # dark/bright scene could make AF convergence fail and this test
             # doesn't care the image sharpness.
@@ -65,19 +67,16 @@
             tile = its.image.get_image_patch(y, 0.45,0.45,0.1,0.1)
             lumas.append(its.image.compute_image_means(tile)[0])
 
-        luma_increase_per_step = pow(2, ev_per_step)
         print "ev_step_size_in_stops", ev_per_step
-        imid = len(lumas) / 2
-        expected_lumas = [lumas[imid] / pow(luma_increase_per_step, i)
-                          for i in range(imid , 0, -1)]  + \
-                         [lumas[imid] * pow(luma_increase_per_step, i-imid)
-                          for i in range(imid, len(evs))]
+        shift_mid = ev_shifts[imid]
+        luma_normal = lumas[imid] / shift_mid
+        expected_lumas = [luma_normal * ev_shift for ev_shift in ev_shifts]
 
-        pylab.plot(evs, lumas, 'r')
-        pylab.plot(evs, expected_lumas, 'b')
+        pylab.plot(ev_steps, lumas, 'r')
+        pylab.plot(ev_steps, expected_lumas, 'b')
         matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
 
-        luma_diffs = [expected_lumas[i] - lumas[i] for i in range(len(evs))]
+        luma_diffs = [expected_lumas[i] - lumas[i] for i in range(len(ev_steps))]
         max_diff = max(abs(i) for i in luma_diffs)
         avg_diff = abs(numpy.array(luma_diffs)).mean()
         print "Max delta between modeled and measured lumas:", max_diff
diff --git a/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py b/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
index d09f2fd..e3755eb 100644
--- a/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
+++ b/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
@@ -29,7 +29,8 @@
 
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
-        its.caps.skip_unless(its.caps.ev_compensation(props))
+        its.caps.skip_unless(its.caps.ev_compensation(props) and
+                             its.caps.ae_lock(props))
 
         ev_per_step = its.objects.rational_to_float(
                 props['android.control.aeCompensationStep'])
diff --git a/apps/CameraITS/tests/scene1/test_exposure.py b/apps/CameraITS/tests/scene1/test_exposure.py
index d217bdb..26c398d 100644
--- a/apps/CameraITS/tests/scene1/test_exposure.py
+++ b/apps/CameraITS/tests/scene1/test_exposure.py
@@ -36,11 +36,13 @@
     THRESHOLD_MIN_LEVEL = 0.1
     THRESHOLD_MAX_LEVEL = 0.9
     THRESHOLD_MAX_LEVEL_DIFF = 0.025
+    THRESHOLD_MAX_LEVEL_DIFF_WIDE_RANGE = 0.05
 
     mults = []
     r_means = []
     g_means = []
     b_means = []
+    threshold_max_level_diff = THRESHOLD_MAX_LEVEL_DIFF
 
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
@@ -57,13 +59,18 @@
             req = its.objects.manual_capture_request(s*m, e/m)
             cap = cam.do_capture(req)
             img = its.image.convert_capture_to_rgb_image(cap)
-            its.image.write_image(img, "%s_mult=%02d.jpg" % (NAME, m))
+            its.image.write_image(img, "%s_mult=%3.2f.jpg" % (NAME, m))
             tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
             rgb_means = its.image.compute_image_means(tile)
             r_means.append(rgb_means[0])
             g_means.append(rgb_means[1])
             b_means.append(rgb_means[2])
-            m = m + 4
+            # Test 3 steps per 2x gain
+            m = m * pow(2, 1.0 / 3)
+
+        # Allow a larger threshold for devices with a wider exposure range
+        if m >= 64.0:
+            threshold_max_level_diff = THRESHOLD_MAX_LEVEL_DIFF_WIDE_RANGE
 
     # Draw a plot.
     pylab.plot(mults, r_means, 'r')
@@ -83,7 +90,7 @@
         max_diff = max_val - min_val
         print "Channel %d line fit (y = mx+b): m = %f, b = %f" % (chan, m, b)
         print "Channel max %f min %f diff %f" % (max_val, min_val, max_diff)
-        assert(max_diff < THRESHOLD_MAX_LEVEL_DIFF)
+        assert(max_diff < threshold_max_level_diff)
         assert(b > THRESHOLD_MIN_LEVEL and b < THRESHOLD_MAX_LEVEL)
         for v in values:
             assert(v > THRESHOLD_MIN_LEVEL and v < THRESHOLD_MAX_LEVEL)
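
The loop above now advances the exposure multiplier by 1/3 stop per capture rather than a fixed additive step, and the pass threshold is relaxed once the sweep reaches 64x. A small sketch of the multiplier series and threshold selection, assuming a 1x starting multiplier (names are illustrative):

    import math

    def exposure_multipliers(m_max, steps_per_stop=3):
        # Multipliers 1, 2^(1/3), 2^(2/3), 2, ... until m_max is reached.
        m, mults = 1.0, []
        while m < m_max:
            mults.append(m)
            m = m * math.pow(2, 1.0 / steps_per_stop)
        return mults, m

    mults, m_final = exposure_multipliers(64.0)
    # Wide-range devices (final multiplier >= 64) get the looser 0.05 bound.
    threshold = 0.05 if m_final >= 64.0 else 0.025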
diff --git a/apps/CameraITS/tests/scene1/test_jpeg.py b/apps/CameraITS/tests/scene1/test_jpeg.py
index 25c2038..7bc038d 100644
--- a/apps/CameraITS/tests/scene1/test_jpeg.py
+++ b/apps/CameraITS/tests/scene1/test_jpeg.py
@@ -33,7 +33,7 @@
                              its.caps.per_frame_control(props))
 
         e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
-        req = its.objects.manual_capture_request(s, e, True)
+        req = its.objects.manual_capture_request(s, e, True, props)
 
         # YUV
         size = its.objects.get_available_output_sizes("yuv", props)[0]
diff --git a/apps/CameraITS/tests/scene1/test_latching.py b/apps/CameraITS/tests/scene1/test_latching.py
index 3bc4356..176f01b 100644
--- a/apps/CameraITS/tests/scene1/test_latching.py
+++ b/apps/CameraITS/tests/scene1/test_latching.py
@@ -45,20 +45,20 @@
         b_means = []
 
         reqs = [
-            its.objects.manual_capture_request(s,  e,   True),
-            its.objects.manual_capture_request(s,  e,   True),
-            its.objects.manual_capture_request(s*2,e,   True),
-            its.objects.manual_capture_request(s*2,e,   True),
-            its.objects.manual_capture_request(s,  e,   True),
-            its.objects.manual_capture_request(s,  e,   True),
-            its.objects.manual_capture_request(s,  e*2, True),
-            its.objects.manual_capture_request(s,  e,   True),
-            its.objects.manual_capture_request(s*2,e,   True),
-            its.objects.manual_capture_request(s,  e,   True),
-            its.objects.manual_capture_request(s,  e*2, True),
-            its.objects.manual_capture_request(s,  e,   True),
-            its.objects.manual_capture_request(s,  e*2, True),
-            its.objects.manual_capture_request(s,  e*2, True),
+            its.objects.manual_capture_request(s,  e,   True, props),
+            its.objects.manual_capture_request(s,  e,   True, props),
+            its.objects.manual_capture_request(s*2,e,   True, props),
+            its.objects.manual_capture_request(s*2,e,   True, props),
+            its.objects.manual_capture_request(s,  e,   True, props),
+            its.objects.manual_capture_request(s,  e,   True, props),
+            its.objects.manual_capture_request(s,  e*2, True, props),
+            its.objects.manual_capture_request(s,  e,   True, props),
+            its.objects.manual_capture_request(s*2,e,   True, props),
+            its.objects.manual_capture_request(s,  e,   True, props),
+            its.objects.manual_capture_request(s,  e*2, True, props),
+            its.objects.manual_capture_request(s,  e,   True, props),
+            its.objects.manual_capture_request(s,  e*2, True, props),
+            its.objects.manual_capture_request(s,  e*2, True, props),
             ]
 
         caps = cam.do_capture(reqs, fmt)
diff --git a/apps/CameraITS/tests/scene1/test_locked_burst.py b/apps/CameraITS/tests/scene1/test_locked_burst.py
index a33a9f8..6552c73 100644
--- a/apps/CameraITS/tests/scene1/test_locked_burst.py
+++ b/apps/CameraITS/tests/scene1/test_locked_burst.py
@@ -38,6 +38,8 @@
 
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.ae_lock(props) and
+                             its.caps.awb_lock(props))
 
         # Converge 3A prior to capture.
         cam.do_3a(do_af=True, lock_ae=True, lock_awb=True)
diff --git a/apps/CameraITS/tests/scene1/test_param_color_correction.py b/apps/CameraITS/tests/scene1/test_param_color_correction.py
index b7fdc7b..09b3707 100644
--- a/apps/CameraITS/tests/scene1/test_param_color_correction.py
+++ b/apps/CameraITS/tests/scene1/test_param_color_correction.py
@@ -42,7 +42,7 @@
 
         # Baseline request
         e, s = its.target.get_target_exposure_combos(cam)["midSensitivity"]
-        req = its.objects.manual_capture_request(s, e, True)
+        req = its.objects.manual_capture_request(s, e, True, props)
         req["android.colorCorrection.mode"] = 0
 
         # Transforms:
diff --git a/apps/CameraITS/tests/scene1/test_param_exposure_time.py b/apps/CameraITS/tests/scene1/test_param_exposure_time.py
index e6078d9..0c0aab1 100644
--- a/apps/CameraITS/tests/scene1/test_param_exposure_time.py
+++ b/apps/CameraITS/tests/scene1/test_param_exposure_time.py
@@ -39,7 +39,7 @@
 
         e,s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
         for i,e_mult in enumerate([0.8, 0.9, 1.0, 1.1, 1.2]):
-            req = its.objects.manual_capture_request(s, e * e_mult, True)
+            req = its.objects.manual_capture_request(s, e * e_mult, True, props)
             cap = cam.do_capture(req)
             img = its.image.convert_capture_to_rgb_image(cap)
             its.image.write_image(
diff --git a/apps/CameraITS/tests/scene1/test_param_flash_mode.py b/apps/CameraITS/tests/scene1/test_param_flash_mode.py
index aae56aa..38f864f 100644
--- a/apps/CameraITS/tests/scene1/test_param_flash_mode.py
+++ b/apps/CameraITS/tests/scene1/test_param_flash_mode.py
@@ -39,7 +39,7 @@
         # linear tonemap.
         e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
         e /= 4
-        req = its.objects.manual_capture_request(s, e, True)
+        req = its.objects.manual_capture_request(s, e, True, props)
 
         for f in [0,1,2]:
             req["android.flash.mode"] = f
diff --git a/apps/CameraITS/tests/scene1/test_param_noise_reduction.py b/apps/CameraITS/tests/scene1/test_param_noise_reduction.py
index 219927d..35cfc07 100644
--- a/apps/CameraITS/tests/scene1/test_param_noise_reduction.py
+++ b/apps/CameraITS/tests/scene1/test_param_noise_reduction.py
@@ -17,10 +17,11 @@
 import its.device
 import its.objects
 import its.target
-import pylab
-import os.path
 import matplotlib
 import matplotlib.pyplot
+import numpy
+import os.path
+import pylab
 
 def main():
     """Test that the android.noiseReduction.mode param is applied when set.
@@ -34,6 +35,7 @@
     """
     NAME = os.path.basename(__file__).split(".")[0]
 
+    RELATIVE_ERROR_TOLERANCE = 0.1
     # List of variances for R,G,B.
     variances = [[],[],[]]
 
@@ -45,7 +47,8 @@
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.compute_target_exposure(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             its.caps.noise_reduction_mode(props, 0))
 
         # NR mode 0 with low gain
         e, s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
@@ -61,17 +64,24 @@
         print "Ref variances:", ref_variance
 
         e, s = its.target.get_target_exposure_combos(cam)["maxSensitivity"]
-        for i in range(3):
-            # NR modes 0, 1, 2 with high gain
+        # NR modes 0, 1, 2, 3, 4 with high gain
+        for mode in range(5):
+            # Skip unavailable modes
+            if not its.caps.noise_reduction_mode(props, mode):
+                nr_modes_reported.append(mode)
+                for channel in range(3):
+                    variances[channel].append(0)
+                continue;
+
             req = its.objects.manual_capture_request(s, e)
-            req["android.noiseReduction.mode"] = i
+            req["android.noiseReduction.mode"] = mode
             cap = cam.do_capture(req)
             rgb_image = its.image.convert_capture_to_rgb_image(cap)
             nr_modes_reported.append(
                     cap["metadata"]["android.noiseReduction.mode"])
             its.image.write_image(
                     rgb_image,
-                    "%s_high_gain_nr=%d.jpg" % (NAME, i))
+                    "%s_high_gain_nr=%d.jpg" % (NAME, mode))
             rgb_tile = its.image.get_image_patch(
                     rgb_image, 0.45, 0.45, 0.1, 0.1)
             rgb_vars = its.image.compute_image_variances(rgb_tile)
@@ -82,16 +92,36 @@
 
     # Draw a plot.
     for j in range(3):
-        pylab.plot(range(3), variances[j], "rgb"[j])
+        pylab.plot(range(5), variances[j], "rgb"[j])
     matplotlib.pyplot.savefig("%s_plot_variances.png" % (NAME))
 
-    assert(nr_modes_reported == [0,1,2])
+    assert(nr_modes_reported == [0,1,2,3,4])
 
-    # Check that the variance of the NR=0 image is higher than for the
-    # NR=1 and NR=2 images.
     for j in range(3):
-        for i in range(1,3):
-            assert(variances[j][i] < variances[j][0])
+        # Smaller variance is better
+        # Verify OFF(0) is not better than FAST(1)
+        assert(variances[j][0] >
+               variances[j][1] * (1.0 - RELATIVE_ERROR_TOLERANCE))
+        # Verify FAST(1) is not better than HQ(2)
+        assert(variances[j][1] >
+               variances[j][2] * (1.0 - RELATIVE_ERROR_TOLERANCE))
+        # Verify HQ(2) is better than OFF(0)
+        assert(variances[j][0] > variances[j][2])
+        if its.caps.noise_reduction_mode(props, 3):
+            # Verify OFF(0) is not better than MINIMAL(3)
+            assert(variances[j][0] >
+                   variances[j][3] * (1.0 - RELATIVE_ERROR_TOLERANCE))
+            # Verify MINIMAL(3) is not better than HQ(2)
+            assert(variances[j][3] >
+                   variances[j][2] * (1.0 - RELATIVE_ERROR_TOLERANCE))
+            if its.caps.noise_reduction_mode(props, 4):
+                # Verify ZSL(4) is close to MINIMAL(3)
+                assert(numpy.isclose(variances[j][4], variances[j][3],
+                                     RELATIVE_ERROR_TOLERANCE))
+        elif its.caps.noise_reduction_mode(props, 4):
+            # Verify ZSL(4) is close to OFF(0)
+            assert(numpy.isclose(variances[j][4], variances[j][0],
+                                 RELATIVE_ERROR_TOLERANCE))
 
 if __name__ == '__main__':
     main()
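
The reworked assertions compare per-channel variances with a 10% relative tolerance rather than demanding a strict ordering. One compact way to read them; the helper name not_better and the sample values are illustrative:

    RELATIVE_ERROR_TOLERANCE = 0.1

    def not_better(var_a, var_b, tol=RELATIVE_ERROR_TOLERANCE):
        # "A is not better than B": smaller variance means stronger noise
        # reduction, so A may undercut B only by the relative tolerance.
        return var_a > var_b * (1.0 - tol)

    # Expected chain for one channel: OFF(0) >= FAST(1) >= HQ(2), with HQ
    # strictly beating OFF; MINIMAL(3) and ZSL(4) slot in when supported.
    v = {0: 1.00, 1: 0.60, 2: 0.35, 3: 0.90, 4: 0.92}   # illustrative values
    assert not_better(v[0], v[1]) and not_better(v[1], v[2]) and v[0] > v[2]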
diff --git a/apps/CameraITS/tests/scene1/test_param_shading_mode.py b/apps/CameraITS/tests/scene1/test_param_shading_mode.py
new file mode 100644
index 0000000..8538675
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_param_shading_mode.py
@@ -0,0 +1,115 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.caps
+import its.device
+import its.objects
+import matplotlib
+import numpy
+import os
+import os.path
+import pylab
+
+def main():
+    """Test that the android.shading.mode param is applied.
+
+    Switch shading modes and check that the lens shading maps are
+    modified as expected.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    NUM_SHADING_MODE_SWITCH_LOOPS = 3
+    THRESHOLD_DIFF_RATIO = 0.15
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+
+        its.caps.skip_unless(its.caps.per_frame_control(props) and
+                             its.caps.lsc_map(props) and
+                             its.caps.lsc_off(props))
+
+        assert(props.has_key("android.lens.info.shadingMapSize") and
+               props["android.lens.info.shadingMapSize"] != None)
+
+        # lsc_off devices should always support OFF(0), FAST(1), and HQ(2)
+        assert(props.has_key("android.shading.availableModes") and
+               set(props["android.shading.availableModes"]) == set([0, 1, 2]))
+
+        num_map_gains = props["android.lens.info.shadingMapSize"]["width"] * \
+                        props["android.lens.info.shadingMapSize"]["height"] * 4
+
+        # Test 1: Switch shading modes several times and verify:
+        #   1. Lens shading maps with mode OFF are all 1.0
+        #   2. Lens shading maps with mode FAST are similar after switching
+        #      shading modes.
+        #   3. Lens shading maps with mode HIGH_QUALITY are similar after
+        #      switching shading modes.
+        cam.do_3a();
+
+        # Get the reference lens shading maps for OFF, FAST, and HIGH_QUALITY
+        # in different sessions.
+        # reference_maps[mode]
+        reference_maps = [[] for mode in range(3)]
+        reference_maps[0] = [1.0] * num_map_gains
+        for mode in range(1, 3):
+            req = its.objects.auto_capture_request();
+            req["android.statistics.lensShadingMapMode"] = 1
+            req["android.shading.mode"] = mode
+            reference_maps[mode] = cam.do_capture(req)["metadata"] \
+                    ["android.statistics.lensShadingMap"]
+
+        # Get the lens shading maps while switching modes in one session.
+        reqs = []
+        for i in range(NUM_SHADING_MODE_SWITCH_LOOPS):
+            for mode in range(3):
+                req = its.objects.auto_capture_request();
+                req["android.statistics.lensShadingMapMode"] = 1
+                req["android.shading.mode"] = mode
+                reqs.append(req);
+
+        caps = cam.do_capture(reqs)
+
+        # shading_maps[mode][loop]
+        shading_maps = [[[] for loop in range(NUM_SHADING_MODE_SWITCH_LOOPS)]
+                for mode in range(3)]
+
+        # Get the shading maps out of capture results
+        for i in range(len(caps)):
+            shading_maps[i % 3][i / 3] = \
+                    caps[i]["metadata"]["android.statistics.lensShadingMap"]
+
+        # Draw the maps
+        for mode in range(3):
+            for i in range(NUM_SHADING_MODE_SWITCH_LOOPS):
+                pylab.clf()
+                pylab.plot(range(num_map_gains), shading_maps[mode][i], 'r')
+                pylab.plot(range(num_map_gains), reference_maps[mode], 'g')
+                pylab.xlim([0, num_map_gains])
+                pylab.ylim([0.9, 4.0])
+                matplotlib.pyplot.savefig("%s_ls_maps_mode_%d_loop_%d.png" %
+                                          (NAME, mode, i))
+
+        print "Verifying lens shading maps with mode OFF are all 1.0"
+        for i in range(NUM_SHADING_MODE_SWITCH_LOOPS):
+            assert(numpy.allclose(shading_maps[0][i], reference_maps[0]))
+
+        for mode in range(1, 3):
+            print "Verifying lens shading maps with mode", mode, "are similar"
+            for i in range(NUM_SHADING_MODE_SWITCH_LOOPS):
+                assert(numpy.allclose(shading_maps[mode][i],
+                                      reference_maps[mode],
+                                      THRESHOLD_DIFF_RATIO))
+
+if __name__ == '__main__':
+    main()
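
Because the in-session requests cycle through shading modes 0, 1, 2 on every loop, capture-result index i above maps to (mode = i % 3, loop = i // 3). A tiny sketch of that regrouping with placeholder values (the test itself uses Python 2's i / 3 for the same integer division):

    NUM_SHADING_MODE_SWITCH_LOOPS = 3
    results = range(3 * NUM_SHADING_MODE_SWITCH_LOOPS)   # stand-ins for capture results

    shading_maps = [[None] * NUM_SHADING_MODE_SWITCH_LOOPS for _ in range(3)]
    for i in results:
        shading_maps[i % 3][i // 3] = i
    # shading_maps[mode][loop] -> [[0, 3, 6], [1, 4, 7], [2, 5, 8]]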
diff --git a/apps/CameraITS/tests/scene1/test_reprocess_noise_reduction.py b/apps/CameraITS/tests/scene1/test_reprocess_noise_reduction.py
new file mode 100644
index 0000000..757dfeb
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_reprocess_noise_reduction.py
@@ -0,0 +1,148 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import math
+import matplotlib
+import matplotlib.pyplot
+import numpy
+import os.path
+import pylab
+
+def main():
+    """Test that the android.noiseReduction.mode param is applied when set for
+       reprocessing requests.
+
+    Captures reprocessed images with the camera dimly lit, using a high analog
+    gain to ensure the captured images are noisy.
+
+    Captures reprocessed images for each supported NR mode (off, "fast",
+    "high quality", "minimal", and "zero shutter lag"). Also captures a
+    reprocessed image with low gain and NR off, and uses the variance of
+    this as the baseline.
+    """
+
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    RELATIVE_ERROR_TOLERANCE = 0.1
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props) and
+                             its.caps.noise_reduction_mode(props, 0) and
+                             (its.caps.yuv_reprocess(props) or
+                              its.caps.private_reprocess(props)))
+
+        # If reprocessing is supported, ZSL NR mode must be available.
+        assert(its.caps.noise_reduction_mode(props, 4))
+
+        reprocess_formats = []
+        if (its.caps.yuv_reprocess(props)):
+            reprocess_formats.append("yuv")
+        if (its.caps.private_reprocess(props)):
+            reprocess_formats.append("private")
+
+        for reprocess_format in reprocess_formats:
+            # List of variances for R, G, B.
+            variances = []
+            nr_modes_reported = []
+
+            # NR mode 0 with low gain
+            e, s = its.target.get_target_exposure_combos(cam)["minSensitivity"]
+            req = its.objects.manual_capture_request(s, e)
+            req["android.noiseReduction.mode"] = 0
+
+            # Test reprocess_format->JPEG reprocessing
+            # TODO: Switch to reprocess_format->YUV when YUV reprocessing is
+            #       supported.
+            size = its.objects.get_available_output_sizes("jpg", props)[0]
+            out_surface = {"width":size[0], "height":size[1], "format":"jpg"}
+            cap = cam.do_capture(req, out_surface, reprocess_format)
+            img = its.image.decompress_jpeg_to_rgb_image(cap["data"])
+            its.image.write_image(img, "%s_low_gain_fmt=jpg.jpg" % (NAME))
+            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+            ref_variance = its.image.compute_image_variances(tile)
+            print "Ref variances:", ref_variance
+
+            for nr_mode in range(5):
+                # Skip unavailable modes
+                if not its.caps.noise_reduction_mode(props, nr_mode):
+                    nr_modes_reported.append(nr_mode)
+                    variances.append(0)
+                    continue
+
+                # NR modes with high gain
+                e, s = its.target.get_target_exposure_combos(cam) \
+                    ["maxSensitivity"]
+                req = its.objects.manual_capture_request(s, e)
+                req["android.noiseReduction.mode"] = nr_mode
+                cap = cam.do_capture(req, out_surface, reprocess_format)
+                nr_modes_reported.append(
+                    cap["metadata"]["android.noiseReduction.mode"])
+
+                img = its.image.decompress_jpeg_to_rgb_image(cap["data"])
+                its.image.write_image(
+                    img, "%s_high_gain_nr=%d_fmt=jpg.jpg" % (NAME, nr_mode))
+                tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+                # Get the variances for R, G, and B channels
+                variance = its.image.compute_image_variances(tile)
+                variances.append(
+                    [variance[chan] / ref_variance[chan] for chan in range(3)])
+            print "Variances with NR mode [0,1,2,3,4]:", variances
+
+            # Draw a plot.
+            for chan in range(3):
+                line = []
+                for nr_mode in range(5):
+                    line.append(variances[nr_mode][chan])
+                pylab.plot(range(5), line, "rgb"[chan])
+
+            matplotlib.pyplot.savefig("%s_plot_%s_variances.png" %
+                                      (NAME, reprocess_format))
+
+            assert(nr_modes_reported == [0,1,2,3,4])
+
+            for j in range(3):
+                # Smaller variance is better
+                # Verify OFF(0) is not better than FAST(1)
+                assert(variances[0][j] >
+                       variances[1][j] * (1.0 - RELATIVE_ERROR_TOLERANCE))
+                # Verify FAST(1) is not better than HQ(2)
+                assert(variances[1][j] >
+                       variances[2][j] * (1.0 - RELATIVE_ERROR_TOLERANCE))
+                # Verify HQ(2) is better than OFF(0)
+                assert(variances[0][j] > variances[2][j])
+                if its.caps.noise_reduction_mode(props, 3):
+                    # Verify OFF(0) is not better than MINIMAL(3)
+                    assert(variances[0][j] >
+                           variances[3][j] * (1.0 - RELATIVE_ERROR_TOLERANCE))
+                    # Verify MINIMAL(3) is not better than HQ(2)
+                    assert(variances[3][j] >
+                           variances[2][j] * (1.0 - RELATIVE_ERROR_TOLERANCE))
+                    # Verify ZSL(4) is close to MINIMAL(3)
+                    assert(numpy.isclose(variances[4][j], variances[3][j],
+                                         RELATIVE_ERROR_TOLERANCE))
+                else:
+                    # Verify ZSL(4) is close to OFF(0)
+                    assert(numpy.isclose(variances[4][j], variances[0][j],
+                                         RELATIVE_ERROR_TOLERANCE))
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_tonemap_sequence.py b/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
index 18ca506..7c87ca2 100644
--- a/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
+++ b/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
@@ -40,7 +40,7 @@
         means = []
 
         # Capture 3 manual shots with a linear tonemap.
-        req = its.objects.manual_capture_request(sens, exp_time, True)
+        req = its.objects.manual_capture_request(sens, exp_time, True, props)
         for i in [0,1,2]:
             cap = cam.do_capture(req)
             img = its.image.convert_capture_to_rgb_image(cap)
diff --git a/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py b/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py
index 1b278ef..0c428fc 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_jpeg_all.py
@@ -35,7 +35,7 @@
         # Use a manual request with a linear tonemap so that the YUV and JPEG
         # should look the same (once converted by the its.image module).
         e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
-        req = its.objects.manual_capture_request(s, e, True)
+        req = its.objects.manual_capture_request(s, e, True, props)
 
         rgbs = []
 
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py b/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py
index 6daa243..9ce8d76 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_jpeg.py
@@ -37,7 +37,7 @@
         # Use a manual request with a linear tonemap so that the YUV and JPEG
         # should look the same (once converted by the its.image module).
         e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
-        req = its.objects.manual_capture_request(s, e, True)
+        req = its.objects.manual_capture_request(s, e, True, props)
 
         cap_yuv, cap_jpeg = cam.do_capture(req, [fmt_yuv, fmt_jpeg])
 
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
index eb01c1a..f13801b 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
@@ -36,7 +36,7 @@
         # Use a manual request with a linear tonemap so that the YUV and RAW
         # should look the same (once converted by the its.image module).
         e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
-        req = its.objects.manual_capture_request(s, e, True)
+        req = its.objects.manual_capture_request(s, e, True, props)
 
         cap_raw, cap_yuv = cam.do_capture(req, cam.CAP_RAW_YUV)
 
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
index 910a8ea..e52946d 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
@@ -36,7 +36,7 @@
         # Use a manual request with a linear tonemap so that the YUV and RAW
         # should look the same (once converted by the its.image module).
         e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
-        req = its.objects.manual_capture_request(s, e, True)
+        req = its.objects.manual_capture_request(s, e, True, props)
 
         cap_raw, cap_yuv = cam.do_capture(req,
                 [{"format":"raw10"}, {"format":"yuv"}])
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py
new file mode 100644
index 0000000..c5c3c73
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py
@@ -0,0 +1,63 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import os.path
+import math
+
+def main():
+    """Test capturing a single frame as both RAW12 and YUV outputs.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    THRESHOLD_MAX_RMS_DIFF = 0.035
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.raw12(props) and
+                             its.caps.per_frame_control(props))
+
+        # Use a manual request with a linear tonemap so that the YUV and RAW
+        # should look the same (once converted by the its.image module).
+        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
+        req = its.objects.manual_capture_request(s, e, True, props)
+
+        cap_raw, cap_yuv = cam.do_capture(req,
+                [{"format":"raw12"}, {"format":"yuv"}])
+
+        img = its.image.convert_capture_to_rgb_image(cap_yuv)
+        its.image.write_image(img, "%s_yuv.jpg" % (NAME), True)
+        tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+        rgb0 = its.image.compute_image_means(tile)
+
+        # Raw shots are 1/2 x 1/2 smaller after conversion to RGB, so scale the
+        # tile appropriately.
+        img = its.image.convert_capture_to_rgb_image(cap_raw, props=props)
+        its.image.write_image(img, "%s_raw.jpg" % (NAME), True)
+        tile = its.image.get_image_patch(img, 0.475, 0.475, 0.05, 0.05)
+        rgb1 = its.image.compute_image_means(tile)
+
+        rms_diff = math.sqrt(
+                sum([pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)]) / 3.0)
+        print "RMS difference:", rms_diff
+        assert(rms_diff < THRESHOLD_MAX_RMS_DIFF)
+
+if __name__ == '__main__':
+    main()
+
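
The pass criterion above is the root-mean-square difference of the three patch means between the YUV and RAW12 paths. A standalone version of that check; the helper name and the sample values are illustrative:

    import math

    def rgb_rms_diff(rgb0, rgb1):
        # Root-mean-square difference across the R, G, B channel means.
        return math.sqrt(sum(pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)) / 3.0)

    # A uniform 1% offset on every channel yields an RMS difference of 0.01,
    # comfortably inside the 0.035 threshold used above.
    assert rgb_rms_diff([0.50, 0.40, 0.30], [0.51, 0.41, 0.31]) < 0.035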
diff --git a/apps/CameraITS/tests/scene2/SampleTarget.jpg b/apps/CameraITS/tests/scene2/SampleTarget.jpg
new file mode 100644
index 0000000..c054f7e
--- /dev/null
+++ b/apps/CameraITS/tests/scene2/SampleTarget.jpg
Binary files differ
diff --git a/apps/CameraITS/tests/scene2/test_faces.py b/apps/CameraITS/tests/scene2/test_faces.py
new file mode 100644
index 0000000..cce74e7
--- /dev/null
+++ b/apps/CameraITS/tests/scene2/test_faces.py
@@ -0,0 +1,102 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import os.path
+
+def main():
+    """Test face detection.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+    NUM_TEST_FRAMES = 20
+    FD_MODE_OFF = 0
+    FD_MODE_SIMPLE = 1
+    FD_MODE_FULL = 2
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        fd_modes = props['android.statistics.info.availableFaceDetectModes']
+        a = props['android.sensor.info.activeArraySize']
+        aw, ah = a['right'] - a['left'], a['bottom'] - a['top']
+        cam.do_3a()
+        for fd_mode in fd_modes:
+            assert(FD_MODE_OFF <= fd_mode <= FD_MODE_FULL)
+            req = its.objects.auto_capture_request()
+            req['android.statistics.faceDetectMode'] = fd_mode
+            caps = cam.do_capture([req]*NUM_TEST_FRAMES)
+            for i,cap in enumerate(caps):
+                md = cap['metadata']
+                assert(md['android.statistics.faceDetectMode'] == fd_mode)
+                faces = md['android.statistics.faces']
+
+                # 0 faces should be returned for OFF mode
+                if fd_mode == FD_MODE_OFF:
+                    assert(len(faces) == 0)
+                    continue
+                # Face detection could take several frames to warm up,
+                # but it should detect at least one face in the last frame.
+                if i == NUM_TEST_FRAMES - 1:
+                    if len(faces) == 0:
+                        print "Error: no face detected in mode", fd_mode
+                        assert(0)
+                if len(faces) == 0:
+                    continue
+
+                print "Frame %d face metadata:" % i
+                print "  Faces:", faces
+                print ""
+
+                face_scores = [face['score'] for face in faces]
+                face_rectangles = [face['bounds'] for face in faces]
+                for score in face_scores:
+                    assert(score >= 1 and score <= 100)
+                # Face bounds should be within active array
+                for rect in face_rectangles:
+                    assert(rect['top'] < rect['bottom'])
+                    assert(rect['left'] < rect['right'])
+                    assert(0 <= rect['top'] <= ah)
+                    assert(0 <= rect['bottom'] <= ah)
+                    assert(0 <= rect['left'] <= aw)
+                    assert(0 <= rect['right'] <= aw)
+
+                # Face landmarks are reported if and only if fd_mode is FULL
+                # Face ID should be -1 for SIMPLE and unique for FULL
+                if fd_mode == FD_MODE_SIMPLE:
+                    for face in faces:
+                        assert('leftEye' not in face)
+                        assert('rightEye' not in face)
+                        assert('mouth' not in face)
+                        assert(face['id'] == -1)
+                elif fd_mode == FD_MODE_FULL:
+                    face_ids = [face['id'] for face in faces]
+                    assert(len(face_ids) == len(set(face_ids)))
+                    # Face landmarks should be within face bounds
+                    for face in faces:
+                        left_eye = face['leftEye']
+                        right_eye = face['rightEye']
+                        mouth = face['mouth']
+                        l, r = face['bounds']['left'], face['bounds']['right']
+                        t, b = face['bounds']['top'], face['bounds']['bottom']
+                        assert(l <= left_eye['x'] <= r)
+                        assert(t <= left_eye['y'] <= b)
+                        assert(l <= right_eye['x'] <= r)
+                        assert(t <= right_eye['y'] <= b)
+                        assert(l <= mouth['x'] <= r)
+                        assert(t <= mouth['y'] <= b)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py b/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py
new file mode 100644
index 0000000..73834cb
--- /dev/null
+++ b/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py
@@ -0,0 +1,202 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.caps
+import its.device
+import its.objects
+import its.target
+import math
+import matplotlib
+import matplotlib.pyplot
+import numpy
+import os.path
+import pylab
+
+
+def test_edge_mode(cam, edge_mode, sensitivity, exp, fd, out_surface,
+                   reprocess_format=None):
+    """Return sharpness of the output image and the capture result metadata
+       for a capture request with the given edge mode, sensitivity, exposure
+       time, focus distance, output surface parameter, and reprocess format
+       (None for a regular request).
+
+    Args:
+        cam: An open device session.
+        edge_mode: Edge mode for the request as defined in android.edge.mode
+        sensitivity: Sensitivity for the request as defined in
+            android.sensor.sensitivity
+        exp: Exposure time for the request as defined in
+            android.sensor.exposureTime.
+        fd: Focus distance for the request as defined in
+            android.lens.focusDistance
+        out_surface: Specifications of the output image format and size.
+        reprocess_format: (Optional) The reprocessing format. If not None,
+                reprocessing will be enabled.
+
+    Returns:
+        Object containing reported edge mode and the sharpness of the output
+        image, keyed by the following strings:
+            "edge_mode"
+            "sharpness"
+    """
+
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    req = its.objects.manual_capture_request(sensitivity, exp)
+    req["android.lens.focusDistance"] = fd
+    req["android.edge.mode"] = edge_mode
+    if (reprocess_format != None):
+        req["android.reprocess.effectiveExposureFactor"] = 1.0
+    cap = cam.do_capture(req, out_surface, reprocess_format)
+
+    img = its.image.decompress_jpeg_to_rgb_image(cap["data"])
+    its.image.write_image(img, "%s_edge=%d_reprocess_fmt_%s.jpg" %
+        (NAME, edge_mode, reprocess_format))
+    tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
+
+    ret = {}
+    ret["edge_mode"] = cap["metadata"]["android.edge.mode"]
+    ret["sharpness"] = its.image.compute_image_sharpness(tile)
+
+    return ret
+
+def main():
+    """Test that the android.edge.mode param is applied when set for
+       reprocessing requests.
+
+    Capture non-reprocess images for each edge mode and calculate their
+    sharpness as a baseline.
+
+    Capture reprocessed images for each supported reprocess format and edge
+    mode. Calculate the sharpness of reprocessed images and compare them
+    against the sharpness of non-reprocess images.
+    """
+
+    THRESHOLD_RELATIVE_SHARPNESS_DIFF = 0.1
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+
+        its.caps.skip_unless(its.caps.read_3a(props) and
+                             its.caps.per_frame_control(props) and
+                             its.caps.edge_mode(props, 0) and
+                             (its.caps.yuv_reprocess(props) or
+                              its.caps.private_reprocess(props)))
+
+        # If reprocessing is supported, ZSL EE mode must be available.
+        assert(its.caps.edge_mode(props, 3))
+
+        reprocess_formats = []
+        if (its.caps.yuv_reprocess(props)):
+            reprocess_formats.append("yuv")
+        if (its.caps.private_reprocess(props)):
+            reprocess_formats.append("private")
+
+        size = its.objects.get_available_output_sizes("jpg", props)[0]
+        out_surface = {"width":size[0], "height":size[1], "format":"jpg"}
+
+        # Get proper sensitivity, exposure time, and focus distance.
+        s,e,_,_,fd = cam.do_3a(get_results=True)
+
+        # Get the sharpness for each edge mode for regular requests
+        sharpness_regular = []
+        edge_mode_reported_regular = []
+        for edge_mode in range(4):
+            # Skip unavailable modes
+            if not its.caps.edge_mode(props, edge_mode):
+                edge_mode_reported_regular.append(edge_mode)
+                sharpness_regular.append(0)
+                continue
+            ret = test_edge_mode(cam, edge_mode, s, e, fd, out_surface)
+            edge_mode_reported_regular.append(ret["edge_mode"])
+            sharpness_regular.append(ret["sharpness"])
+
+        print "Reported edge modes:", edge_mode_reported_regular
+        print "Sharpness with EE mode [0,1,2,3]:", sharpness_regular
+
+        # Get the sharpness for each reprocess format and edge mode for
+        # reprocess requests.
+        sharpnesses_reprocess = []
+        edge_mode_reported_reprocess = []
+
+        for reprocess_format in reprocess_formats:
+            # List of sharpness
+            sharpnesses = []
+            edge_mode_reported = []
+            for edge_mode in range(4):
+                # Skip unavailable modes
+                if not its.caps.edge_mode(props, edge_mode):
+                    edge_mode_reported.append(edge_mode)
+                    sharpnesses.append(0)
+                    continue
+
+                ret = test_edge_mode(cam, edge_mode, s, e, fd, out_surface,
+                    reprocess_format)
+                edge_mode_reported.append(ret["edge_mode"])
+                sharpnesses.append(ret["sharpness"])
+
+            sharpnesses_reprocess.append(sharpnesses)
+            edge_mode_reported_reprocess.append(edge_mode_reported)
+
+            print "Reported edge modes:", edge_mode_reported
+            print "Sharpness with EE mode [0,1,2,3] for %s reprocess:" % \
+                (reprocess_format) , sharpnesses
+
+
+        # Verify HQ(2) is sharper than OFF(0)
+        assert(sharpness_regular[2] > sharpness_regular[0])
+
+        # Verify ZSL(3) is similar to OFF(0)
+        assert(numpy.isclose(sharpness_regular[3], sharpness_regular[0],
+                             THRESHOLD_RELATIVE_SHARPNESS_DIFF))
+
+        # Verify OFF(0) is not sharper than FAST(1)
+        assert(sharpness_regular[1] >
+               sharpness_regular[0] * (1.0 - THRESHOLD_RELATIVE_SHARPNESS_DIFF))
+
+        # Verify FAST(1) is not sharper than HQ(2)
+        assert(sharpness_regular[2] >
+               sharpness_regular[1] * (1.0 - THRESHOLD_RELATIVE_SHARPNESS_DIFF))
+
+        for reprocess_format in range(len(reprocess_formats)):
+            # Verify HQ(2) is sharper than OFF(0)
+            assert(sharpnesses_reprocess[reprocess_format][2] >
+                   sharpnesses_reprocess[reprocess_format][0])
+
+            # Verify ZSL(3) is similar to OFF(0)
+            assert(numpy.isclose(sharpnesses_reprocess[reprocess_format][3],
+                                 sharpnesses_reprocess[reprocess_format][0],
+                                 THRESHOLD_RELATIVE_SHARPNESS_DIFF))
+
+            # Verify OFF(0) is not sharper than FAST(1)
+            assert(sharpnesses_reprocess[reprocess_format][1] >
+                   sharpnesses_reprocess[reprocess_format][0] *
+                   (1.0 - THRESHOLD_RELATIVE_SHARPNESS_DIFF))
+
+            # Verify FAST(1) is not sharper than HQ(2)
+            assert(sharpnesses_reprocess[reprocess_format][2] >
+                   sharpnesses_reprocess[reprocess_format][1] *
+                   (1.0 - THRESHOLD_RELATIVE_SHARPNESS_DIFF))
+
+            # Verify reprocessing HQ(2) is similar to regular HQ(2) relative to
+            # OFF(0)
+            assert(numpy.isclose(sharpnesses_reprocess[reprocess_format][2] /
+                                    sharpnesses_reprocess[reprocess_format][0],
+                                 sharpness_regular[2] / sharpness_regular[0],
+                                 THRESHOLD_RELATIVE_SHARPNESS_DIFF))
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tools/config.py b/apps/CameraITS/tools/config.py
index 6e83412..52929aa 100644
--- a/apps/CameraITS/tools/config.py
+++ b/apps/CameraITS/tools/config.py
@@ -44,7 +44,7 @@
     # Command line args, ignoring any args that will be passed down to the
     # ItsSession constructor.
     args = [s for s in sys.argv if s[:6] not in \
-            ["reboot", "camera", "target", "noinit"]]
+            ["reboot", "camera", "target", "device"]]
 
     if len(args) == 1:
         with its.device.ItsSession() as cam:
diff --git a/apps/CameraITS/tools/convert_yuv_to_jpg.py b/apps/CameraITS/tools/convert_yuv_to_jpg.py
new file mode 100644
index 0000000..4498c2a
--- /dev/null
+++ b/apps/CameraITS/tools/convert_yuv_to_jpg.py
@@ -0,0 +1,37 @@
+# Copyright 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import sys
+
+def main():
+    """Open a YUV420 file and save it as a JPEG.
+
+    Command line args:
+        filename.yuv: The YUV420 file to open.
+        w: The width of the image.
+        h: The height of the image.
+        layout: The layout of the data, in ["planar", "nv21"].
+    """
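+    # Example (hypothetical file name):
+    #   python tools/convert_yuv_to_jpg.py capture.yuv 640 480 planar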
+    if len(sys.argv) != 5:
+        print "Usage: python %s <filename.yuv> <w> <h> <layout>" % sys.argv[0]
+    else:
+        fname, w, h = sys.argv[1], int(sys.argv[2]), int(sys.argv[3])
+        layout = sys.argv[4]
+        img = its.image.load_yuv420_to_rgb_image(fname, w, h, layout=layout)
+        its.image.write_image(img, fname.replace(".yuv", ".jpg"), False)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tools/run_all_tests.py b/apps/CameraITS/tools/run_all_tests.py
index 2bbd387..dd12512 100644
--- a/apps/CameraITS/tools/run_all_tests.py
+++ b/apps/CameraITS/tools/run_all_tests.py
@@ -41,12 +41,20 @@
             "test_ev_compensation_advanced",
             "test_ev_compensation_basic",
             "test_yuv_plus_jpeg"
-        ]
+        ],
+        "scene2":[],
+        "scene3":[]
     }
 
-    # Get all the scene0 and scene1 tests, which can be run using the same
-    # physical setup.
+    # Get the tests for each scene. scene0 and scene1 can be run with the
+    # same physical setup; scene2 and scene3 each need their own setup.
-    scenes = ["scene0", "scene1"]
+    scenes = ["scene0", "scene1", "scene2", "scene3"]
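+    # Physical setup required for each scene. The description is shown to the
+    # operator by validate_scene.py before that scene's tests are run; None
+    # means no confirmation prompt is needed.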
+    scene_req = {
+        "scene0" : None,
+        "scene1" : "A grey card covering at least the middle 30% of the scene",
+        "scene2" : "A picture containing human faces",
+        "scene3" : "A chart containing sharp edges like ISO 12233"
+    }
     tests = []
     for d in scenes:
         tests += [(d,s[:-3],os.path.join("tests", d, s))
@@ -58,6 +66,10 @@
     topdir = tempfile.mkdtemp()
     print "Saving output files to:", topdir, "\n"
 
+    device_id = its.device.get_device_id()
+    device_id_arg = "device=" + device_id
+    print "Testing device " + device_id
+
     camera_ids = []
     for s in sys.argv[1:]:
         if s[:7] == "camera=" and len(s) > 7:
@@ -68,7 +80,8 @@
         camera_ids_path = os.path.join(topdir, "camera_ids.txt")
         out_arg = "out=" + camera_ids_path
         cmd = ['python',
-               os.path.join(os.getcwd(),"tools/get_camera_ids.py"), out_arg]
+               os.path.join(os.getcwd(),"tools/get_camera_ids.py"), out_arg,
+               device_id_arg]
         retcode = subprocess.call(cmd,cwd=topdir)
         assert(retcode == 0)
         with open(camera_ids_path, "r") as f:
@@ -86,14 +99,6 @@
         for d in scenes:
             os.mkdir(os.path.join(topdir, camera_id, d))
 
-        out_path = os.path.join(topdir, camera_id, "scene.jpg")
-        out_arg = "out=" + out_path
-        cmd = ['python',
-               os.path.join(os.getcwd(),"tools/validate_scene.py"),
-               camera_id_arg, out_arg]
-        retcode = subprocess.call(cmd,cwd=topdir)
-        assert(retcode == 0)
-
         print "Start running ITS on camera: ", camera_id
         # Run each test, capturing stdout and stderr.
         summary = "ITS test result summary for camera " + camera_id + "\n"
@@ -102,7 +107,19 @@
         numnotmandatedfail = 0
         numfail = 0
 
+        prev_scene = ""
         for (scene,testname,testpath) in tests:
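+            # When the scene changes and the new scene has a setup requirement,
+            # run validate_scene.py once so the operator can confirm the
+            # physical setup before its tests start.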
+            if scene != prev_scene and scene_req[scene] is not None:
+                out_path = os.path.join(topdir, camera_id, scene+".jpg")
+                out_arg = "out=" + out_path
+                scene_arg = "scene=" + scene_req[scene]
+                cmd = ['python',
+                        os.path.join(os.getcwd(),"tools/validate_scene.py"),
+                        camera_id_arg, out_arg, scene_arg, device_id_arg]
+                retcode = subprocess.call(cmd,cwd=topdir)
+                assert(retcode == 0)
+                print "Start running tests for", scene
+            prev_scene = scene
             cmd = ['python', os.path.join(os.getcwd(),testpath)] + \
                   sys.argv[1:] + [camera_id_arg]
             outdir = os.path.join(topdir,camera_id,scene)
@@ -158,7 +175,7 @@
         summary_path = os.path.join(topdir, camera_id, "summary.txt")
         with open(summary_path, "w") as f:
             f.write(summary)
-        its.device.report_result(camera_id, result, summary_path)
+        its.device.report_result(device_id, camera_id, result, summary_path)
 
     print "ITS tests finished. Please go back to CtsVerifier and proceed"
 
diff --git a/apps/CameraITS/tools/validate_scene.py b/apps/CameraITS/tools/validate_scene.py
index e1e89f2..1f35163 100644
--- a/apps/CameraITS/tools/validate_scene.py
+++ b/apps/CameraITS/tools/validate_scene.py
@@ -16,17 +16,26 @@
 import its.device
 import its.objects
 import its.image
+import its.caps
+import re
 
 def main():
     """capture a yuv image and save it to argv[1]
     """
     camera_id = -1
     out_path = ""
+    scene_name = ""
+    scene_desc = "No requirement"
     for s in sys.argv[1:]:
         if s[:7] == "camera=" and len(s) > 7:
             camera_id = s[7:]
         elif s[:4] == "out=" and len(s) > 4:
             out_path = s[4:]
+        elif s[:6] == "scene=" and len(s) > 6:
+            scene_desc = s[6:]
+
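+    # Derive the scene name from the output path, e.g. ".../scene2.jpg" gives
+    # "scene2".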
+    if out_path != "":
+        scene_name = re.split("/|\.", out_path)[-2]
 
     if camera_id == -1:
         print "Error: need to specify which camera to use"
@@ -34,13 +43,16 @@
 
     with its.device.ItsSession() as cam:
         raw_input("Press Enter after placing camera " + camera_id +
-                " to frame the test scene")
+                " to frame the test scene: " + scene_name +
+                "\nThe scene setup should be: " + scene_desc )
         # Converge 3A prior to capture.
         cam.do_3a(do_af=True, lock_ae=True, lock_awb=True)
         props = cam.get_camera_properties()
         req = its.objects.fastest_auto_capture_request(props)
-        req["android.control.awbLock"] = True
-        req["android.control.aeLock"] = True
+        if its.caps.awb_lock(props):
+            req["android.control.awbLock"] = True
+        if its.caps.ae_lock(props):
+            req["android.control.aeLock"] = True
         while True:
             print "Capture an image to check the test scene"
             cap = cam.do_capture(req)
@@ -49,7 +61,8 @@
                 its.image.write_image(img, out_path)
             print "Please check scene setup in", out_path
             choice = raw_input(
-                "Is the image okay for ITS scene1? (Y/N)").lower()
+                "Is the image okay for ITS " + scene_name +
+                "? (Y/N)").lower()
             if choice == "y":
                 break
             else:
diff --git a/apps/CtsVerifier/Android.mk b/apps/CtsVerifier/Android.mk
index dc5fda5..8cec7ea 100644
--- a/apps/CtsVerifier/Android.mk
+++ b/apps/CtsVerifier/Android.mk
@@ -30,11 +30,20 @@
                                compatibility-common-util-devicesidelib_v2 \
                                cts-sensors-tests \
                                ctstestrunner \
+                               apache-commons-math \
+                               androidplot \
+                               ctsverifier-opencv \
+                               core-tests \
+                               android-support-v4  \
+                               mockito-target \
+                               mockwebserver \
+                               compatibility-device-util_v2 \
 
 LOCAL_PACKAGE_NAME := CtsVerifier
 
-LOCAL_JNI_SHARED_LIBRARIES := libctsverifier_jni \
-	#libcameraanalyzer # Needed for the disabled CameraAnalyzer tests
+LOCAL_AAPT_FLAGS += --version-name "6.0_r0 $(BUILD_NUMBER)"
+
+LOCAL_JNI_SHARED_LIBRARIES := libctsverifier_jni libaudioloopback_jni
 
 LOCAL_PROGUARD_FLAG_FILES := proguard.flags
 
@@ -44,13 +53,25 @@
 
 include $(BUILD_PACKAGE)
 
+
+# opencv library
+include $(CLEAR_VARS)
+
+LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES := \
+        ctsverifier-opencv:libs/opencv-android.jar
+
+include $(BUILD_MULTI_PREBUILT)
+
+
 notification-bot := $(call intermediates-dir-for,APPS,NotificationBot)/package.apk
+permission-app := $(call intermediates-dir-for,APPS,CtsPermissionApp)/package.apk
 
 # Builds and launches CTS Verifier on a device.
 .PHONY: cts-verifier
-cts-verifier: CtsVerifier adb NotificationBot
+cts-verifier: CtsVerifier adb NotificationBot CtsPermissionApp
 	adb install -r $(PRODUCT_OUT)/data/app/CtsVerifier/CtsVerifier.apk \
 		&& adb install -r $(notification-bot) \
+		&& adb install -r $(permission-app) \
 		&& adb shell "am start -n com.android.cts.verifier/.CtsVerifierActivity"
 
 #
@@ -87,10 +108,12 @@
 endif
 $(verifier-zip) : $(HOST_OUT)/CameraITS
 $(verifier-zip) : $(notification-bot)
+$(verifier-zip) : $(permission-app)
 $(verifier-zip) : $(call intermediates-dir-for,APPS,CtsVerifier)/package.apk | $(ACP)
 		$(hide) mkdir -p $(verifier-dir)
 		$(hide) $(ACP) -fp $< $(verifier-dir)/CtsVerifier.apk
 		$(ACP) -fp $(notification-bot) $(verifier-dir)/NotificationBot.apk
+		$(ACP) -fp $(permission-app) $(verifier-dir)/CtsPermissionApp.apk
 ifeq ($(HOST_OS),linux)
 		$(hide) $(ACP) -fp $(HOST_OUT)/bin/cts-usb-accessory $(verifier-dir)/cts-usb-accessory
 endif
diff --git a/apps/CtsVerifier/AndroidManifest.xml b/apps/CtsVerifier/AndroidManifest.xml
index ae62f84..217913c 100644
--- a/apps/CtsVerifier/AndroidManifest.xml
+++ b/apps/CtsVerifier/AndroidManifest.xml
@@ -17,10 +17,9 @@
 
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
       package="com.android.cts.verifier"
-      android:versionCode="5"
-      android:versionName="5.1_r1">
+      android:versionCode="5">
 
-    <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="21"/>
+    <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="23"/>
 
     <uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
     <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
@@ -57,6 +56,7 @@
     <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
     <uses-permission android:name="com.android.providers.tv.permission.READ_EPG_DATA" />
     <uses-permission android:name="com.android.providers.tv.permission.WRITE_EPG_DATA" />
+    <uses-permission android:name="android.permission.USE_FINGERPRINT"/>
 
     <!-- Needed by the Audio Quality Verifier to store the sound samples that will be mailed. -->
     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
@@ -353,6 +353,28 @@
             <meta-data android:name="test_category" android:value="@string/test_category_security" />
         </activity>
 
+        <activity android:name=".security.FingerprintBoundKeysTest"
+                android:label="@string/sec_fingerprint_bound_key_test"
+                android:configChanges="keyboardHidden|orientation|screenSize" >
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_security" />
+            <meta-data android:name="test_excluded_features"
+                       android:value="android.hardware.type.television:android.software.leanback:android.hardware.type.watch" />
+        </activity>
+        <activity android:name=".security.ScreenLockBoundKeysTest"
+                android:label="@string/sec_lock_bound_key_test"
+                android:configChanges="keyboardHidden|orientation|screenSize" >
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_security" />
+            <meta-data android:name="test_excluded_features"
+                       android:value="android.hardware.type.television:android.software.leanback:android.hardware.type.watch" />
+        </activity>
         <activity android:name=".security.LockConfirmBypassTest"
                 android:label="@string/lock_confirm_test_title"
                 android:configChanges="keyboardHidden|orientation|screenSize" >
@@ -430,6 +452,10 @@
                 android:label="@string/nfc_ndef_push_receiver"
                 android:configChanges="keyboardHidden|orientation|screenSize" />
 
+        <activity android:name=".nfc.LlcpVersionActivity"
+                android:label="@string/nfc_llcp_version_check"
+                android:configChanges="keyboardHidden|orientation|screenSize" />
+
         <activity android:name=".nfc.TagVerifierActivity"
                 android:label="@string/nfc_tag_verifier"
                 android:configChanges="keyboardHidden|orientation|screenSize" />
@@ -720,6 +746,33 @@
                        android:value="android.hardware.sensor.compass" />
         </activity>
 
+        <activity
+            android:name=".sensors.RVCVXCheckTestActivity"
+            android:keepScreenOn="true"
+            android:label="@string/snsr_rvcvxchk_test"
+            android:screenOrientation="locked" >
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
+            </intent-filter>
+
+            <meta-data
+                android:name="test_category"
+                android:value="@string/test_category_sensors" />
+            <meta-data
+                android:name="test_required_features"
+                android:value="android.hardware.sensor.accelerometer:android.hardware.sensor.gyroscope:android.hardware.sensor.compass:android.hardware.camera.any" />
+            <meta-data android:name="test_excluded_features"
+                    android:value="android.hardware.type.television" />
+        </activity>
+        <activity
+            android:name=".sensors.RVCVRecordActivity"
+            android:keepScreenOn="true"
+            android:label="@string/snsr_rvcvxchk_test_rec"
+            android:screenOrientation="locked" >
+        </activity>
+
+
         <!-- TODO: enable when a full set of verifications can be implemented -->
         <!--activity android:name=".sensors.RotationVectorTestActivity"
                   android:label="@string/snsr_rot_vec_test"
@@ -872,23 +925,6 @@
 
             <meta-data android:name="test_required_features" android:value="android.hardware.camera.any"/>
         </activity>
-<!-- Experimental. If re-enabling, libcameraanalyzer must be included in the build
-        <activity android:name=".camera.analyzer.CameraAnalyzerActivity"
-                 android:label="@string/camera_analyzer"
-                 android:screenOrientation="landscape">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN" />
-                <category android:name="android.cts.intent.category.MANUAL_TEST" />
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_camera" />
-
-            <intent-filter>
-                <action android:name="android.hardware.usb.action.USB_ACCESSORY_ATTACHED" />
-            </intent-filter>
-            <meta-data android:name="android.hardware.usb.action.USB_ACCESSORY_ATTACHED"
-                android:resource="@xml/accessory_filter_adk" />
-        </activity>
--->
 
         <activity android:name=".camera.intents.CameraIntentsActivity"
                  android:label="@string/camera_intents">
@@ -962,6 +998,17 @@
             <meta-data android:name="test_required_features" android:value="android.hardware.camera.any" />
         </activity>
 
+        <activity android:name=".camera.flashlight.CameraFlashlightActivity"
+                  android:label="@string/camera_flashlight_test"
+                  android:configChanges="keyboardHidden|orientation|screenSize">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_camera" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.camera.flash" />
+        </activity>
+
         <activity android:name=".usb.UsbAccessoryTestActivity"
                 android:label="@string/usb_accessory_test"
                 android:configChanges="keyboardHidden|orientation|screenSize">
@@ -1069,6 +1116,22 @@
                     android:value="android.software.leanback" />
         </activity>
 
+        <activity android:name=".security.KeyChainTest"
+                android:label="@string/keychain_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_security" />
+            <!-- KeyChain is only installed on communication-oriented devices inheriting core.mk -->
+            <meta-data android:name="test_excluded_features"
+                    android:value="android.hardware.type.watch:android.hardware.type.television:android.software.leanback" />
+        </activity>
+
         <activity android:name=".p2p.GoNegRequesterTestListActivity"
                 android:label="@string/p2p_go_neg_requester"
                 android:configChanges="keyboardHidden|orientation|screenSize" />
@@ -1133,8 +1196,6 @@
             <meta-data android:name="test_category" android:value="@string/test_category_other" />
             <meta-data android:name="test_required_features"
                     android:value="android.software.app_widgets" />
-            <meta-data android:name="test_excluded_features"
-                       android:value="android.software.leanback" />
         </activity>
 
         <activity android:name=".deskclock.DeskClockTestsActivity"
@@ -1166,6 +1227,23 @@
                        android:value="android.hardware.type.television:android.software.leanback" />
         </activity>
 -->
+        <activity
+            android:name="com.android.cts.verifier.sensors.DeviceSuspendTestActivity"
+            android:label="@string/snsr_device_suspend_test"
+            android:screenOrientation="nosensor" >
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_sensors" />
+        </activity>
+
+        <receiver android:name="com.android.cts.verifier.sensors.DeviceSuspendTestActivity$AlarmReceiver">
+        </receiver>
+
+        <receiver android:name="com.android.cts.verifier.sensors.SignificantMotionTestActivity$AlarmReceiver">
+        </receiver>
+
         <activity
             android:name="com.android.cts.verifier.sensors.SignificantMotionTestActivity"
             android:label="@string/snsr_significant_motion_test"
@@ -1266,16 +1344,58 @@
                  android:label="@string/projection_service_name"
                  android:process=":projectionservice" />
 
-        <activity android:name=".managedprovisioning.DeviceOwnerTestActivity"
+        <activity android:name=".managedprovisioning.DeviceOwnerNegativeTestActivity"
                 android:label="@string/provisioning_device_owner">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
                 <category android:name="android.cts.intent.category.MANUAL_TEST" />
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_managed_provisioning" />
-            <meta-data android:name="test_required_features" android:value="android.software.managed_users:android.software.device_admin" />
+            <meta-data android:name="test_required_features" android:value="android.software.device_admin" />
         </activity>
 
+        <activity android:name=".managedprovisioning.DeviceOwnerPositiveTestActivity"
+                android:label="@string/positive_device_owner">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_managed_provisioning" />
+            <meta-data android:name="test_required_features" android:value="android.software.device_admin" />
+        </activity>
+
+        <activity android:name=".managedprovisioning.DeviceOwnerPositiveTestActivity$CommandReceiver"
+                android:exported="false"
+                android:theme="@android:style/Theme.NoDisplay"
+                android:noHistory="true"
+                android:autoRemoveFromRecents="true"
+                android:stateNotNeeded="true">
+        </activity>
+
+        <activity android:name=".managedprovisioning.KeyguardDisabledFeaturesActivity"
+                android:label="@string/provisioning_byod_keyguard_disabled_features">
+        </activity>
+
+        <activity android:name=".managedprovisioning.WifiLockdownTestActivity"
+                android:label="@string/device_owner_wifi_lockdown_test">
+        </activity>
+
+        <activity android:name=".managedprovisioning.PermissionLockdownTestActivity"
+                android:label="@string/device_profile_owner_permission_lockdown_test">
+            <intent-filter>
+                <action android:name="com.android.cts.verifier.managedprovisioning.action.CHECK_PERMISSION_LOCKDOWN" />
+                <category android:name="android.intent.category.DEFAULT" />
+            </intent-filter>
+        </activity>
+
+        <activity-alias
+                android:name=".managedprovisioning.ManagedProfilePermissionLockdownTestActivity"
+                android:targetActivity=".managedprovisioning.PermissionLockdownTestActivity">
+            <intent-filter>
+                <action android:name="com.android.cts.verifier.managedprovisioning.action.MANAGED_PROFILE_CHECK_PERMISSION_LOCKDOWN" />
+                <category android:name="android.intent.category.DEFAULT" />
+            </intent-filter>
+        </activity-alias>
 
         <activity android:name=".managedprovisioning.ByodFlowTestActivity"
                 android:launchMode="singleTask"
@@ -1297,9 +1417,12 @@
                 <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_QUERY" />
                 <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_REMOVE" />
                 <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_INSTALL_APK" />
+                <action android:name="com.android.cts.verifier.managedprovisioning.action.CHECK_INTENT_FILTERS" />
                 <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_CAPTURE_AND_CHECK_IMAGE" />
                 <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_CAPTURE_AND_CHECK_VIDEO" />
                 <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_CAPTURE_AND_CHECK_AUDIO" />
+                <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_KEYGUARD_DISABLED_FEATURES" />
+                <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_LOCKNOW" />
                 <action android:name="com.android.cts.verifier.managedprovisioning.TEST_NFC_BEAM" />
                 <category android:name="android.intent.category.DEFAULT"></category>
             </intent-filter>
@@ -1333,16 +1456,25 @@
             </intent-filter>
         </activity>
 
+        <activity android:name=".managedprovisioning.WorkStatusTestActivity">
+            <intent-filter>
+                <action android:name="com.android.cts.verifier.managedprovisioning.WORK_STATUS_ICON" />
+                <action android:name="com.android.cts.verifier.managedprovisioning.WORK_STATUS_TOAST" />
+                <category android:name="android.intent.category.DEFAULT"></category>
+            </intent-filter>
+        </activity>
+
         <activity android:name=".managedprovisioning.WorkNotificationTestActivity">
             <intent-filter>
                 <action android:name="com.android.cts.verifier.managedprovisioning.WORK_NOTIFICATION" />
+                <action android:name="com.android.cts.verifier.managedprovisioning.LOCKSCREEN_NOTIFICATION" />
                 <action android:name="com.android.cts.verifier.managedprovisioning.CLEAR_WORK_NOTIFICATION" />
                 <category android:name="android.intent.category.DEFAULT"></category>
             </intent-filter>
         </activity>
 
         <receiver android:name=".managedprovisioning.DeviceAdminTestReceiver"
-                android:label="@string/provisioning_byod_device_admin"
+                android:label="@string/afw_device_admin"
                 android:permission="android.permission.BIND_DEVICE_ADMIN">
             <meta-data android:name="android.app.device_admin"
                        android:resource="@xml/device_admin_byod" />
@@ -1427,6 +1559,17 @@
                     android:value="android.software.live_tv" />
         </activity>
 
+        <activity android:name=".tv.TimeShiftTestActivity"
+                android:label="@string/tv_time_shift_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_tv" />
+            <meta-data android:name="test_required_features"
+                    android:value="android.software.live_tv" />
+        </activity>
+
         <activity android:name=".screenpinning.ScreenPinningTestActivity"
             android:label="@string/screen_pinning_test">
             <intent-filter>
@@ -1438,16 +1581,102 @@
                        android:value="android.hardware.type.television:android.software.leanback:android.hardware.type.watch" />
         </activity>
 
-        <activity android:name=".tv.MockTvInputSettingsActivity">
+        <activity android:name=".tv.MockTvInputSetupActivity">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
             </intent-filter>
         </activity>
 
-        <activity android:name=".tv.MockTvInputSetupActivity">
+        <activity android:name=".audio.HifiUltrasoundTestActivity"
+                android:label="@string/hifi_ultrasound_test"
+                android:screenOrientation="locked">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
             </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
+        </activity>
+
+        <activity android:name=".audio.HifiUltrasoundSpeakerTestActivity"
+                android:label="@string/hifi_ultrasound_speaker_test"
+                android:screenOrientation="locked">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
+        </activity>
+
+        <activity android:name=".audio.AudioDeviceNotificationsActivity"
+                  android:label="@string/audio_devices_notifications_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <!--
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
+            -->
+        </activity>
+
+        <activity android:name=".audio.AudioRoutingNotificationsActivity"
+                  android:label="@string/audio_routingnotifications_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <!--
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
+            -->
+        </activity>
+
+        <activity android:name=".audio.AudioLoopbackActivity"
+                  android:label="@string/audio_loopback_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
+            <meta-data android:name="test_excluded_features" android:value="android.hardware.type.watch:android.hardware.type.television" />
+        </activity>
+
+        <activity android:name=".audio.AudioFrequencyLineActivity"
+                  android:label="@string/audio_frequency_line_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone:android.hardware.audio.output" />
+        </activity>
+
+        <activity android:name=".audio.AudioFrequencySpeakerActivity"
+                  android:label="@string/audio_frequency_speaker_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.audio.output:android.hardware.usb.host" />
+        </activity>
+
+        <activity android:name=".audio.AudioFrequencyMicActivity"
+                  android:label="@string/audio_frequency_mic_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone:android.hardware.audio.output:android.hardware.usb.host" />
         </activity>
 
         <service android:name=".tv.MockTvInputService"
diff --git a/apps/CtsVerifier/create_test_certs.sh b/apps/CtsVerifier/create_test_certs.sh
new file mode 100755
index 0000000..b59974a
--- /dev/null
+++ b/apps/CtsVerifier/create_test_certs.sh
@@ -0,0 +1,85 @@
+#!/bin/bash
+
+#
+# Creates or overwrites 3 files in ./res/raw:
+#   - cacert.der
+#   - userkey.der
+#   - usercert.der
+#
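+# Run this from apps/CtsVerifier/ so the relative 'res/raw/' destination used
+# at the end of the script resolves correctly.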
+
+tmpdir=$(mktemp -d './XXXXXXXX')
+trap 'rm -r ${tmpdir}; echo; exit 1' EXIT INT QUIT
+
+# CA_default defined in openssl.cnf
+CA_DIR='demoCA'
+
+SUBJECT=\
+'/C=US'\
+'/ST=CA'\
+'/L=Mountain View'\
+'/O=Android'\
+'/CN=localhost'
+PASSWORD='androidtest'
+
+echo "Creating directory '$CA_DIR'..."
+mkdir -p "$tmpdir"/"$CA_DIR"/newcerts \
+    && echo '01' > "$tmpdir"/"$CA_DIR"/serial \
+    && touch "$tmpdir"/"$CA_DIR"/index.txt
+
+echo "Generating CA certificate..."
+(cd "$tmpdir" \
+    && openssl req \
+        -new \
+        -x509 \
+        -days 3650 \
+        -extensions v3_ca \
+        -keyout 'cakey.pem' \
+        -out 'cacert.pem' \
+        -subj "$SUBJECT" \
+        -passout 'pass:'"$PASSWORD" \
+    && openssl x509 \
+        -outform DER \
+        -in 'cacert.pem' \
+        -out 'cacert.der')
+
+echo "Generating user key..."
+(cd "$tmpdir" \
+    && openssl req \
+        -newkey rsa:2048 \
+        -sha256 \
+        -keyout 'userkey.pem' \
+        -nodes \
+        -days 3650 \
+        -out 'userkey.req' \
+        -subj "$SUBJECT" \
+    && openssl pkcs8 \
+        -topk8 \
+        -outform DER \
+        -in 'userkey.pem' \
+        -out 'userkey.der' \
+        -nocrypt)
+
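+# 'openssl ca' below signs the request using the demoCA database
+# (serial, index.txt, newcerts) created at the top of this script.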
+echo "Generating user certificate..."
+(cd "$tmpdir" \
+    && openssl ca \
+        -out 'usercert.pem' \
+        -in 'userkey.req' \
+        -cert 'cacert.pem' \
+        -keyfile 'cakey.pem' \
+        -days 3650 \
+        -passin 'pass:'"$PASSWORD" \
+        -batch \
+    && openssl x509 \
+        -outform DER \
+        -in 'usercert.pem' \
+        -out 'usercert.der')
+
+# Copy important files to raw resources directory
+cp \
+    "$tmpdir"/cacert.der \
+    "$tmpdir"/userkey.der \
+    "$tmpdir"/usercert.der \
+    'res/raw/'
+
+# Clear the cleanup trap before the final exit so a successful run returns 0
+# (the EXIT trap above would otherwise force an exit status of 1).
+trap - EXIT
+rm -r "$tmpdir"
+
+echo "Finished"
+exit 0
diff --git a/apps/CtsVerifier/jni/audio_loopback/Android.mk b/apps/CtsVerifier/jni/audio_loopback/Android.mk
new file mode 100644
index 0000000..3dfbc34
--- /dev/null
+++ b/apps/CtsVerifier/jni/audio_loopback/Android.mk
@@ -0,0 +1,28 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
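+# Native half of the CtsVerifier audio loopback test. The resulting
+# libaudioloopback_jni is packaged through LOCAL_JNI_SHARED_LIBRARIES in
+# apps/CtsVerifier/Android.mk.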
+LOCAL_MODULE      := libaudioloopback_jni
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES   := \
+	sles.cpp \
+	jni_sles.c
+
+LOCAL_C_INCLUDES := \
+        system/media/audio_utils/include \
+        frameworks/wilhelm/include
+
+LOCAL_SHARED_LIBRARIES := \
+	libutils \
+	libcutils \
+	libOpenSLES \
+	libnbaio \
+	liblog \
+	libaudioutils
+
+LOCAL_PRELINK_MODULE := false
+
+LOCAL_LDFLAGS := -Wl,--hash-style=sysv
+LOCAL_CFLAGS := -DSTDC_HEADERS
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/apps/CtsVerifier/jni/audio_loopback/jni_sles.c b/apps/CtsVerifier/jni/audio_loopback/jni_sles.c
new file mode 100644
index 0000000..a865078
--- /dev/null
+++ b/apps/CtsVerifier/jni/audio_loopback/jni_sles.c
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/log.h>
+#include "sles.h"
+#include "jni_sles.h"
+#include <stdio.h>
+#include <stddef.h>
+
+/////
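+// JNI entry points for com.android.cts.verifier.audio.NativeAudioThread; each
+// one forwards to the corresponding sles* helper implemented in sles.cpp.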
+JNIEXPORT jlong JNICALL Java_com_android_cts_verifier_audio_NativeAudioThread_slesInit
+  (JNIEnv *env __unused, jobject obj __unused, jint samplingRate, jint frameCount, jint micSource) {
+
+    sles_data * pSles = NULL;
+
+    if (slesInit(&pSles, samplingRate, frameCount, micSource) != SLES_FAIL) {
+
+        return (long)pSles;
+    }
+    // FIXME This should be stored as a (long) field in the object,
+    //       so that incorrect Java code could not synthesize a bad sles pointer.
+    return 0;
+}
+
+JNIEXPORT jint JNICALL Java_com_android_cts_verifier_audio_NativeAudioThread_slesProcessNext
+  (JNIEnv *env __unused, jobject obj __unused, jlong sles, jdoubleArray samplesArray,
+          jlong offset) {
+    sles_data * pSles= (sles_data*) ((long)sles);
+
+    long maxSamples = (*env)->GetArrayLength(env, samplesArray);
+    double *pSamples = (*env)->GetDoubleArrayElements(env, samplesArray, NULL);
+
+    long availableSamples = maxSamples - offset;
+    double *pCurrentSample = pSamples + offset;
+
+    SLES_PRINTF("jni slesProcessNext pSles:%p, currentSample %p, availableSamples %ld ", pSles,
+            pCurrentSample, availableSamples);
+
+    int samplesRead = slesProcessNext(pSles, pCurrentSample, availableSamples);
+
+    // Release the array elements so the buffer is copied back / unpinned and not leaked.
+    (*env)->ReleaseDoubleArrayElements(env, samplesArray, pSamples, 0);
+
+    return samplesRead;
+}
+
+JNIEXPORT jint JNICALL Java_com_android_cts_verifier_audio_NativeAudioThread_slesDestroy
+  (JNIEnv *env __unused, jobject obj __unused, jlong sles) {
+    sles_data * pSles= (sles_data*) ((long) sles);
+
+    int status = slesDestroy(&pSles);
+
+    return status;
+}
diff --git a/apps/CtsVerifier/jni/audio_loopback/jni_sles.h b/apps/CtsVerifier/jni/audio_loopback/jni_sles.h
new file mode 100644
index 0000000..7bff040
--- /dev/null
+++ b/apps/CtsVerifier/jni/audio_loopback/jni_sles.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <jni.h>
+
+#ifndef _Included_org_drrickorang_loopback_jni
+#define _Included_org_drrickorang_loopback_jni
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+////////////////////////
+JNIEXPORT jlong JNICALL Java_com_android_cts_verifier_audio_NativeAudioThread_slesInit
+  (JNIEnv *, jobject, jint, jint, jint );
+
+JNIEXPORT jint JNICALL Java_com_android_cts_verifier_audio_NativeAudioThread_slesProcessNext
+  (JNIEnv *, jobject , jlong, jdoubleArray, jlong );
+
+JNIEXPORT jint JNICALL Java_com_android_cts_verifier_audio_NativeAudioThread_slesDestroy
+  (JNIEnv *, jobject , jlong );
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif //_Included_org_drrickorang_loopback_jni
diff --git a/apps/CtsVerifier/jni/audio_loopback/sles.cpp b/apps/CtsVerifier/jni/audio_loopback/sles.cpp
new file mode 100644
index 0000000..7859d35
--- /dev/null
+++ b/apps/CtsVerifier/jni/audio_loopback/sles.cpp
@@ -0,0 +1,655 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+////////////////////////////////////////////
+/// Actual sles functions.
+
+
+// Test program to record from default audio input and playback to default audio output.
+// It will generate feedback (Larsen effect) if played through on-device speakers,
+// or acts as a delay if played through headset.
+
+#include "sles.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <stddef.h>
+
+#include <assert.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+int slesInit(sles_data ** ppSles, int samplingRate, int frameCount, int micSource) {
+    int status = SLES_FAIL;
+    if (ppSles != NULL) {
+        sles_data * pSles = (sles_data*) calloc(1, sizeof (sles_data));
+
+        SLES_PRINTF("calloc %zu bytes at %p", sizeof(sles_data), pSles);
+        *ppSles = pSles;
+        if (pSles != NULL)
+        {
+            SLES_PRINTF("creating server. Sampling rate =%d, frame count = %d",samplingRate,
+                    frameCount);
+            status = slesCreateServer(pSles, samplingRate, frameCount, micSource);
+            SLES_PRINTF("slesCreateServer =%d",status);
+        }
+    }
+    return status;
+}
+int slesDestroy(sles_data ** ppSles) {
+    int status = SLES_FAIL;
+    if (ppSles != NULL) {
+        slesDestroyServer(*ppSles);
+
+        if (*ppSles != NULL)
+        {
+            free(*ppSles);
+            *ppSles = 0;
+        }
+        status = SLES_SUCCESS;
+    }
+    return status;
+}
+
+#define ASSERT_EQ(x, y) do { if ((x) == (y)) ; else { fprintf(stderr, "0x%x != 0x%x\n", \
+        (unsigned) (x), (unsigned) (y)); assert((x) == (y)); } } while (0)
+
+
+// Called after audio recorder fills a buffer with data
+static void recorderCallback(SLAndroidSimpleBufferQueueItf caller __unused, void *context) {
+    sles_data *pSles = (sles_data*) context;
+    if (pSles != NULL) {
+
+
+
+        SLresult result;
+
+        pthread_mutex_lock(&(pSles->mutex));
+        //ee  SLES_PRINTF("<R");
+
+        // We should only be called when a recording buffer is done
+        assert(pSles->rxFront <= pSles->rxBufCount);
+        assert(pSles->rxRear <= pSles->rxBufCount);
+        assert(pSles->rxFront != pSles->rxRear);
+        char *buffer = pSles->rxBuffers[pSles->rxFront];
+
+        // Remove buffer from record queue
+        if (++pSles->rxFront > pSles->rxBufCount) {
+            pSles->rxFront = 0;
+        }
+
+        ssize_t actual = audio_utils_fifo_write(&(pSles->fifo), buffer,
+                (size_t) pSles->bufSizeInFrames);
+        if (actual != (ssize_t) pSles->bufSizeInFrames) {
+            write(1, "?", 1);
+        }
+
+        // This is called by a realtime (SCHED_FIFO) thread,
+        // and it is unsafe to do I/O as it could block for unbounded time.
+        // Flash filesystem is especially notorious for blocking.
+        if (pSles->fifo2Buffer != NULL) {
+            actual = audio_utils_fifo_write(&(pSles->fifo2), buffer,
+                    (size_t) pSles->bufSizeInFrames);
+            if (actual != (ssize_t) pSles->bufSizeInFrames) {
+                write(1, "?", 1);
+            }
+        }
+
+        // Enqueue this same buffer for the recorder to fill again.
+        result = (*(pSles->recorderBufferQueue))->Enqueue(pSles->recorderBufferQueue, buffer,
+                pSles->bufSizeInBytes);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        // Update our model of the record queue
+        SLuint32 rxRearNext = pSles->rxRear+1;
+        if (rxRearNext > pSles->rxBufCount) {
+            rxRearNext = 0;
+        }
+        assert(rxRearNext != pSles->rxFront);
+        pSles->rxBuffers[pSles->rxRear] = buffer;
+        pSles->rxRear = rxRearNext;
+
+
+
+        //ee  SLES_PRINTF("r>");
+        pthread_mutex_unlock(&(pSles->mutex));
+
+    } //pSles not null
+}
+
+
+// Called after audio player empties a buffer of data
+static void playerCallback(SLBufferQueueItf caller __unused, void *context) {
+    sles_data *pSles = (sles_data*) context;
+    if (pSles != NULL) {
+
+        SLresult result;
+
+        pthread_mutex_lock(&(pSles->mutex));
+        //ee  SLES_PRINTF("<P");
+
+        // Get the buffer that just finished playing
+        assert(pSles->txFront <= pSles->txBufCount);
+        assert(pSles->txRear <= pSles->txBufCount);
+        assert(pSles->txFront != pSles->txRear);
+        char *buffer = pSles->txBuffers[pSles->txFront];
+        if (++pSles->txFront > pSles->txBufCount) {
+            pSles->txFront = 0;
+        }
+
+
+        ssize_t actual = audio_utils_fifo_read(&(pSles->fifo), buffer, pSles->bufSizeInFrames);
+        if (actual != (ssize_t) pSles->bufSizeInFrames) {
+            write(1, "/", 1);
+            // on underrun from pipe, substitute silence
+            memset(buffer, 0, pSles->bufSizeInFrames * pSles->channels * sizeof(short));
+        }
+
+        if (pSles->injectImpulse == -1) {
+            // Experimentally, a single frame impulse was insufficient to trigger feedback.
+            // Also a Nyquist frequency signal was also insufficient, probably because
+            // the response of output and/or input path was not adequate at high frequencies.
+            // This short burst of a few cycles of square wave at Nyquist/4 was found to work well.
+            for (unsigned i = 0; i < pSles->bufSizeInFrames / 8; i += 8) {
+                for (int j = 0; j < 8; j++) {
+                    for (unsigned k = 0; k < pSles->channels; k++) {
+                        ((short *)buffer)[(i+j)*pSles->channels+k] = j < 4 ? 0x7FFF : 0x8000;
+                    }
+                }
+            }
+            pSles->injectImpulse = 0;
+        }
+
+        // Enqueue the filled buffer for playing
+        result = (*(pSles->playerBufferQueue))->Enqueue(pSles->playerBufferQueue, buffer,
+                pSles->bufSizeInBytes);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        // Update our model of the player queue
+        assert(pSles->txFront <= pSles->txBufCount);
+        assert(pSles->txRear <= pSles->txBufCount);
+        SLuint32 txRearNext = pSles->txRear+1;
+        if (txRearNext > pSles->txBufCount) {
+            txRearNext = 0;
+        }
+        assert(txRearNext != pSles->txFront);
+        pSles->txBuffers[pSles->txRear] = buffer;
+        pSles->txRear = txRearNext;
+
+
+        //ee    SLES_PRINTF("p>");
+        pthread_mutex_unlock(&(pSles->mutex));
+
+    } //pSles not null
+}
+
+int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource) {
+    int status = SLES_FAIL;
+
+    if (pSles == NULL) {
+        return status;
+    }
+
+    //        adb shell slesTest_feedback -r1 -t1 -s48000 -f240 -i300 -e3 -o/sdcard/log.wav
+    //            r1 and t1 are the receive and transmit buffer counts, typically 1
+    //            s is the sample rate, typically 48000 or 44100
+    //            f is the frame count per buffer, typically 240 or 256
+    //            i is the number of milliseconds before impulse.  You may need to adjust this.
+    //            e is number of seconds to record
+    //            o is output .wav file name
+
+
+    //        // default values
+    //        SLuint32 rxBufCount = 1;     // -r#
+    //        SLuint32 txBufCount = 1;     // -t#
+    //        SLuint32 bufSizeInFrames = 240;  // -f#
+    //        SLuint32 channels = 1;       // -c#
+    //        SLuint32 sampleRate = 48000; // -s#
+    //        SLuint32 exitAfterSeconds = 3; // -e#
+    //        SLuint32 freeBufCount = 0;   // calculated
+    //        SLuint32 bufSizeInBytes = 0; // calculated
+    //        int injectImpulse = 300; // -i#i
+    //
+    //        // Storage area for the buffer queues
+    //        char **rxBuffers;
+    //        char **txBuffers;
+    //        char **freeBuffers;
+    //
+    //        // Buffer indices
+    //        SLuint32 rxFront;    // oldest recording
+    //        SLuint32 rxRear;     // next to be recorded
+    //        SLuint32 txFront;    // oldest playing
+    //        SLuint32 txRear;     // next to be played
+    //        SLuint32 freeFront;  // oldest free
+    //        SLuint32 freeRear;   // next to be freed
+    //
+    //        audio_utils_fifo fifo; //(*)
+    //        SLAndroidSimpleBufferQueueItf recorderBufferQueue;
+    //        SLBufferQueueItf playerBufferQueue;
+
+    // default values
+    pSles->rxBufCount = 1;     // -r#
+    pSles->txBufCount = 1;     // -t#
+    pSles->bufSizeInFrames = frameCount;//240;  // -f#
+    pSles->channels = 1;       // -c#
+    pSles->sampleRate = samplingRate;//48000; // -s#
+    pSles->exitAfterSeconds = 3; // -e#
+    pSles->freeBufCount = 0;   // calculated
+    pSles->bufSizeInBytes = 0; // calculated
+    pSles->injectImpulse = 300; // -i#i
+
+    // Storage area for the buffer queues
+    //        char **rxBuffers;
+    //        char **txBuffers;
+    //        char **freeBuffers;
+
+    // Buffer indices (left at zero by the calloc above):
+    //   rxFront   - oldest recording      rxRear   - next to be recorded
+    //   txFront   - oldest playing        txRear   - next to be played
+    //   freeFront - oldest free           freeRear - next to be freed
+    // The fifo/fifo2 and the buffer queue interfaces are set up below.
+    pSles->fifo2Buffer = NULL;
+
+    // compute total free buffers as -r plus -t
+    pSles->freeBufCount = pSles->rxBufCount + pSles->txBufCount;
+    // compute buffer size
+    pSles->bufSizeInBytes = pSles->channels * pSles->bufSizeInFrames * sizeof(short);
+
+    // Initialize free buffers
+    pSles->freeBuffers = (char **) calloc(pSles->freeBufCount+1, sizeof(char *));
+    unsigned j;
+    for (j = 0; j < pSles->freeBufCount; ++j) {
+        pSles->freeBuffers[j] = (char *) malloc(pSles->bufSizeInBytes);
+    }
+    pSles->freeFront = 0;
+    pSles->freeRear = pSles->freeBufCount;
+    pSles->freeBuffers[j] = NULL;
+
+    // Initialize record queue
+    pSles->rxBuffers = (char **) calloc(pSles->rxBufCount+1, sizeof(char *));
+    pSles->rxFront = 0;
+    pSles->rxRear = 0;
+
+    // Initialize play queue
+    pSles->txBuffers = (char **) calloc(pSles->txBufCount+1, sizeof(char *));
+    pSles->txFront = 0;
+    pSles->txRear = 0;
+
+    size_t frameSize = pSles->channels * sizeof(short);
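+    // Lock-free FIFO carrying recorded frames from recorderCallback to
+    // playerCallback; this is the loopback path.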
+#define FIFO_FRAMES 1024
+    pSles->fifoBuffer = new short[FIFO_FRAMES * pSles->channels];
+    audio_utils_fifo_init(&(pSles->fifo), FIFO_FRAMES, frameSize, pSles->fifoBuffer);
+
+    //        SNDFILE *sndfile;
+    //        if (outFileName != NULL) {
+    // create .wav writer
+    //            SF_INFO info;
+    //            info.frames = 0;
+    //            info.samplerate = sampleRate;
+    //            info.channels = channels;
+    //            info.format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
+    //            sndfile = sf_open(outFileName, SFM_WRITE, &info);
+    //            if (sndfile != NULL) {
+#define FIFO2_FRAMES 65536
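+    // The second FIFO keeps a copy of the recorded data for hand-off to the
+    // Java side (read back via slesProcessNext).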
+    pSles->fifo2Buffer = new short[FIFO2_FRAMES * pSles->channels];
+    audio_utils_fifo_init(&(pSles->fifo2), FIFO2_FRAMES, frameSize, pSles->fifo2Buffer);
+    //            } else {
+    //                fprintf(stderr, "sf_open failed\n");
+    //            }
+    //        } else {
+    //            sndfile = NULL;
+    //        }
+
+    SLresult result;
+
+    // create engine
+    result = slCreateEngine(&(pSles->engineObject), 0, NULL, 0, NULL, NULL);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    result = (*(pSles->engineObject))->Realize(pSles->engineObject, SL_BOOLEAN_FALSE);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    SLEngineItf engineEngine;
+    result = (*(pSles->engineObject))->GetInterface(pSles->engineObject, SL_IID_ENGINE,
+            &engineEngine);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+    // create output mix
+    result = (*engineEngine)->CreateOutputMix(engineEngine, &(pSles->outputmixObject), 0, NULL,
+            NULL);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    result = (*(pSles->outputmixObject))->Realize(pSles->outputmixObject, SL_BOOLEAN_FALSE);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+    // create an audio player with buffer queue source and output mix sink
+    SLDataSource audiosrc;
+    SLDataSink audiosnk;
+    SLDataFormat_PCM pcm;
+    SLDataLocator_OutputMix locator_outputmix;
+    SLDataLocator_BufferQueue locator_bufferqueue_tx;
+    locator_bufferqueue_tx.locatorType = SL_DATALOCATOR_BUFFERQUEUE;
+    locator_bufferqueue_tx.numBuffers = pSles->txBufCount;
+    locator_outputmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
+    locator_outputmix.outputMix = pSles->outputmixObject;
+    pcm.formatType = SL_DATAFORMAT_PCM;
+    pcm.numChannels = pSles->channels;
+    pcm.samplesPerSec = pSles->sampleRate * 1000;
+    pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.containerSize = 16;
+    pcm.channelMask = pSles->channels == 1 ? SL_SPEAKER_FRONT_CENTER :
+            (SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
+    pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+    audiosrc.pLocator = &locator_bufferqueue_tx;
+    audiosrc.pFormat = &pcm;
+    audiosnk.pLocator = &locator_outputmix;
+    audiosnk.pFormat = NULL;
+    pSles->playerObject = NULL;
+    pSles->recorderObject = NULL;
+    SLInterfaceID ids_tx[1] = {SL_IID_BUFFERQUEUE};
+    SLboolean flags_tx[1] = {SL_BOOLEAN_TRUE};
+    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &(pSles->playerObject),
+            &audiosrc, &audiosnk, 1, ids_tx, flags_tx);
+    if (SL_RESULT_CONTENT_UNSUPPORTED == result) {
+        fprintf(stderr, "Could not create audio player (result %x), check sample rate\n",
+                result);
+        SLES_PRINTF("ERROR: Could not create audio player (result %x), check sample rate\n",
+                result);
+        goto cleanup;
+    }
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    result = (*(pSles->playerObject))->Realize(pSles->playerObject, SL_BOOLEAN_FALSE);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    SLPlayItf playerPlay;
+    result = (*(pSles->playerObject))->GetInterface(pSles->playerObject, SL_IID_PLAY,
+            &playerPlay);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    result = (*(pSles->playerObject))->GetInterface(pSles->playerObject, SL_IID_BUFFERQUEUE,
+            &(pSles->playerBufferQueue));
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    result = (*(pSles->playerBufferQueue))->RegisterCallback(pSles->playerBufferQueue,
+            playerCallback, pSles);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+    // Enqueue some zero buffers for the player
+    for (j = 0; j < pSles->txBufCount; ++j) {
+
+        // allocate a free buffer
+        assert(pSles->freeFront != pSles->freeRear);
+        char *buffer = pSles->freeBuffers[pSles->freeFront];
+        if (++pSles->freeFront > pSles->freeBufCount) {
+            pSles->freeFront = 0;
+        }
+
+        // put on play queue
+        SLuint32 txRearNext = pSles->txRear + 1;
+        if (txRearNext > pSles->txBufCount) {
+            txRearNext = 0;
+        }
+        assert(txRearNext != pSles->txFront);
+        pSles->txBuffers[pSles->txRear] = buffer;
+        pSles->txRear = txRearNext;
+        result = (*(pSles->playerBufferQueue))->Enqueue(pSles->playerBufferQueue,
+                buffer, pSles->bufSizeInBytes);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    }
+
+    result = (*playerPlay)->SetPlayState(playerPlay, SL_PLAYSTATE_PLAYING);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+    // Create an audio recorder with microphone device source and buffer queue sink.
+    // The buffer queue as sink is an Android-specific extension.
+
+    SLDataLocator_IODevice locator_iodevice;
+    SLDataLocator_AndroidSimpleBufferQueue locator_bufferqueue_rx;
+    locator_iodevice.locatorType = SL_DATALOCATOR_IODEVICE;
+    locator_iodevice.deviceType = SL_IODEVICE_AUDIOINPUT;
+    locator_iodevice.deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT;
+    locator_iodevice.device = NULL;
+    audiosrc.pLocator = &locator_iodevice;
+    audiosrc.pFormat = NULL;
+    locator_bufferqueue_rx.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
+    locator_bufferqueue_rx.numBuffers = pSles->rxBufCount;
+    audiosnk.pLocator = &locator_bufferqueue_rx;
+    audiosnk.pFormat = &pcm;
+    {
+        SLInterfaceID ids_rx[2] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+                SL_IID_ANDROIDCONFIGURATION};
+        SLboolean flags_rx[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
+        result = (*engineEngine)->CreateAudioRecorder(engineEngine, &(pSles->recorderObject),
+                &audiosrc, &audiosnk, 2, ids_rx, flags_rx);
+        if (SL_RESULT_SUCCESS != result) {
+            fprintf(stderr, "Could not create audio recorder (result %x), "
+                    "check sample rate and channel count\n", result);
+            status = SLES_FAIL;
+
+            SLES_PRINTF("ERROR: Could not create audio recorder (result %x), "
+                    "check sample rate and channel count\n", result);
+            goto cleanup;
+        }
+    }
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+    {
+        /* Get the Android configuration interface which is explicit */
+        SLAndroidConfigurationItf configItf;
+        result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject,
+                SL_IID_ANDROIDCONFIGURATION, (void*)&configItf);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        SLuint32 presetValue = micSource;
+        /* Use the configuration interface to configure the recorder before it's realized */
+        if (presetValue != SL_ANDROID_RECORDING_PRESET_NONE) {
+            result = (*configItf)->SetConfiguration(configItf, SL_ANDROID_KEY_RECORDING_PRESET,
+                    &presetValue, sizeof(SLuint32));
+            ASSERT_EQ(SL_RESULT_SUCCESS, result);
+        }
+
+    }
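SL_ANDROID_KEY_RECORDING_PRESET only takes effect on an unrealized recorder, which is why this block runs before the Realize() call below. The micSource value handed in from the caller is used directly as the preset, so it is expected to be one of the SL_ANDROID_RECORDING_PRESET_* constants. A hypothetical caller-side snippet:

// Hypothetical example of choosing a preset before calling into this code
// (constants come from OpenSLES_AndroidConfiguration.h via OpenSLES_Android.h).
// VOICE_RECOGNITION typically avoids input effects such as AGC, which is
// usually what a latency measurement wants; NONE keeps the platform default.
SLuint32 micSource = SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION;
// ... pass micSource down through slesInit()/slesCreateServer() ...
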
+
+    result = (*(pSles->recorderObject))->Realize(pSles->recorderObject, SL_BOOLEAN_FALSE);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    SLRecordItf recorderRecord;
+    result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject, SL_IID_RECORD,
+            &recorderRecord);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject,
+            SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &(pSles->recorderBufferQueue));
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    result = (*(pSles->recorderBufferQueue))->RegisterCallback(pSles->recorderBufferQueue,
+            recorderCallback, pSles);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+    // Enqueue some empty buffers for the recorder
+    for (j = 0; j < pSles->rxBufCount; ++j) {
+
+        // allocate a free buffer
+        assert(pSles->freeFront != pSles->freeRear);
+        char *buffer = pSles->freeBuffers[pSles->freeFront];
+        if (++pSles->freeFront > pSles->freeBufCount) {
+            pSles->freeFront = 0;
+        }
+
+        // put on record queue
+        SLuint32 rxRearNext = pSles->rxRear + 1;
+        if (rxRearNext > pSles->rxBufCount) {
+            rxRearNext = 0;
+        }
+        assert(rxRearNext != pSles->rxFront);
+        pSles->rxBuffers[pSles->rxRear] = buffer;
+        pSles->rxRear = rxRearNext;
+        result = (*(pSles->recorderBufferQueue))->Enqueue(pSles->recorderBufferQueue,
+                buffer, pSles->bufSizeInBytes);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    }
+
+    // Kick off the recorder
+    result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_RECORDING);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+    // Tear down the objects and exit
+    status = SLES_SUCCESS;
+    cleanup:
+    SLES_PRINTF("Finished initialization with status: %d", status);
+
+    return status;
+}
+
+int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples) {
+    //int status = SLES_FAIL;
+
+    SLES_PRINTF("slesProcessNext: pSles = %p, currentSample: %p,  maxSamples = %ld", pSles,
+            pSamples, maxSamples);
+
+    int samplesRead = 0;
+
+    int currentSample = 0;
+    double *pCurrentSample = pSamples;
+    int maxValue = 32768;
+
+    if (pSles == NULL) {
+        return samplesRead;
+    }
+
+    SLresult result;
+    for (int i = 0; i < 10; i++) {
+        usleep(100000);
+        if (pSles->fifo2Buffer != NULL) {
+            for (;;) {
+                short buffer[pSles->bufSizeInFrames * pSles->channels];
+                ssize_t actual = audio_utils_fifo_read(&(pSles->fifo2), buffer,
+                        pSles->bufSizeInFrames);
+                if (actual <= 0)
+                    break;
+                for (int jj = 0; jj < actual && currentSample < maxSamples; jj++) {
+                    *(pCurrentSample++) = ((double) buffer[jj]) / maxValue;
+                    currentSample++;
+                }
+                samplesRead += actual;
+            }
+        }
+        if (pSles->injectImpulse > 0) {
+            if (pSles->injectImpulse <= 100) {
+                pSles->injectImpulse = -1;
+                write(1, "I", 1);
+            } else {
+                if ((pSles->injectImpulse % 1000) < 100) {
+                    write(1, "i", 1);
+                }
+                pSles->injectImpulse -= 100;
+            }
+        } else if (i == 9) {
+            write(1, ".", 1);
+        }
+    }
+    SLBufferQueueState playerBQState;
+    result = (*(pSles->playerBufferQueue))->GetState(pSles->playerBufferQueue,
+            &playerBQState);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    SLAndroidSimpleBufferQueueState recorderBQState;
+    result = (*(pSles->recorderBufferQueue))->GetState(pSles->recorderBufferQueue,
+            &recorderBQState);
+    ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+    SLES_PRINTF("End of slesProcessNext: pSles = %p, samplesRead = %d, maxSamples= %ld", pSles,
+            samplesRead, maxSamples);
+
+    return samplesRead;
+}
+
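slesProcessNext blocks for roughly a second per call (ten 100 ms sleeps), drains whatever fifo2 holds, and returns samples already scaled to [-1, 1) by dividing the 16-bit values by 32768. A hypothetical caller-side loop, with invented names, might look like:

// Hypothetical poll loop on the caller's side (names invented for illustration);
// each call blocks ~1 s and yields up to kMaxSamples normalized doubles.
static void pollLoopExample(sles_data *pSles, volatile int *keepRecording) {
    enum { kMaxSamples = 4096 };
    double samples[kMaxSamples];
    while (*keepRecording) {
        int n = slesProcessNext(pSles, samples, kMaxSamples);
        // hand samples[0..n) to the latency analysis (consumer not shown)
        (void) n;
    }
}
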
+int slesDestroyServer(sles_data *pSles) {
+    int status = SLES_FAIL;
+
+    SLES_PRINTF("Start slesDestroyServer: pSles = %p", pSles);
+    if (pSles == NULL) {
+        return status;
+    }
+
+    if (NULL != pSles->playerObject) {
+
+        SLES_PRINTF("stopping player...");
+        SLPlayItf playerPlay;
+        SLresult result = (*(pSles->playerObject))->GetInterface(pSles->playerObject,
+                SL_IID_PLAY, &playerPlay);
+
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        // stop the player if it exists (the recorder is stopped in the next block)
+        result = (*playerPlay)->SetPlayState(playerPlay, SL_PLAYSTATE_STOPPED);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    }
+
+    if (NULL != pSles->recorderObject) {
+        SLES_PRINTF("stopping recorder...");
+        SLRecordItf recorderRecord;
+        SLresult result = (*(pSles->recorderObject))->GetInterface(pSles->recorderObject,
+                SL_IID_RECORD, &recorderRecord);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+
+        result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_STOPPED);
+        ASSERT_EQ(SL_RESULT_SUCCESS, result);
+    }
+
+    usleep(1000);
+
+    audio_utils_fifo_deinit(&(pSles->fifo));
+    delete[] pSles->fifoBuffer;
+
+    SLES_PRINTF("slesDestroyServer 2");
+
+    //        if (sndfile != NULL) {
+    audio_utils_fifo_deinit(&(pSles->fifo2));
+    delete[] pSles->fifo2Buffer;
+
+    SLES_PRINTF("slesDestroyServer 3");
+
+    //            sf_close(sndfile);
+    //        }
+    if (NULL != pSles->playerObject) {
+        (*(pSles->playerObject))->Destroy(pSles->playerObject);
+    }
+
+    SLES_PRINTF("slesDestroyServer 4");
+
+    if (NULL != pSles->recorderObject) {
+        (*(pSles->recorderObject))->Destroy(pSles->recorderObject);
+    }
+
+    SLES_PRINTF("slesDestroyServer 5");
+
+    (*(pSles->outputmixObject))->Destroy(pSles->outputmixObject);
+    SLES_PRINTF("slesDestroyServer 6");
+    (*(pSles->engineObject))->Destroy(pSles->engineObject);
+    SLES_PRINTF("slesDestroyServer 7");
+
+    //        free(pSles);
+    //        pSles=NULL;
+
+    status = SLES_SUCCESS;
+
+    SLES_PRINTF("End slesDestroyServer: status = %d", status);
+    return status;
+}
+
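slesDestroyServer tears things down in the order OpenSL ES requires: stop the player and recorder, release the FIFOs, then destroy the child objects before the output mix and finally the engine. The repeated NULL-check-then-Destroy pattern could equally be factored into a small helper; a sketch, not part of the patch:

// Sketch of a destroy-and-clear helper equivalent to the inline NULL checks
// in slesDestroyServer above; clearing the pointer makes repeat calls safe.
static void destroyIfCreated(SLObjectItf *pObject) {
    if (*pObject != NULL) {
        (**pObject)->Destroy(*pObject);
        *pObject = NULL;
    }
}
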
diff --git a/apps/CtsVerifier/jni/audio_loopback/sles.h b/apps/CtsVerifier/jni/audio_loopback/sles.h
new file mode 100644
index 0000000..2550b81
--- /dev/null
+++ b/apps/CtsVerifier/jni/audio_loopback/sles.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <pthread.h>
+#include <android/log.h>
+
+#ifndef _Included_org_drrickorang_loopback_sles
+#define _Included_org_drrickorang_loopback_sles
+
+//struct audio_utils_fifo;
+#define SLES_PRINTF(...)  __android_log_print(ANDROID_LOG_INFO, "sles_jni", __VA_ARGS__);
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include <audio_utils/fifo.h>
+
+typedef struct {
+    SLuint32 rxBufCount;     // -r#
+    SLuint32 txBufCount;     // -t#
+    SLuint32 bufSizeInFrames;  // -f#
+    SLuint32 channels;       // -c#
+    SLuint32 sampleRate; // -s#
+    SLuint32 exitAfterSeconds; // -e#
+    SLuint32 freeBufCount;   // calculated
+    SLuint32 bufSizeInBytes; // calculated
+    int injectImpulse; // -i#i
+
+    // Storage area for the buffer queues
+    char **rxBuffers;
+    char **txBuffers;
+    char **freeBuffers;
+
+    // Buffer indices
+    SLuint32 rxFront;    // oldest recording
+    SLuint32 rxRear;     // next to be recorded
+    SLuint32 txFront;    // oldest playing
+    SLuint32 txRear;     // next to be played
+    SLuint32 freeFront;  // oldest free
+    SLuint32 freeRear;   // next to be freed
+
+    struct audio_utils_fifo fifo; //(*)
+    struct audio_utils_fifo fifo2;
+    short *fifo2Buffer;
+    short *fifoBuffer;
+    SLAndroidSimpleBufferQueueItf recorderBufferQueue;
+    SLBufferQueueItf playerBufferQueue;
+
+    pthread_mutex_t mutex; // = PTHREAD_MUTEX_INITIALIZER;
+
+    // OpenSL ES objects owned by this session
+    SLObjectItf playerObject;
+    SLObjectItf recorderObject;
+    SLObjectItf outputmixObject;
+    SLObjectItf engineObject;
+} sles_data;
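The two fields marked "calculated" are derived from the option-style settings above; for 16-bit PCM the byte size follows directly from frames and channels. A hedged sketch of the derivation (the real assignments live in the earlier, unshown part of sles.cpp, and the freeBufCount formula below is an assumption):

// Sketch only: how the "calculated" fields are plausibly derived.
// bufSizeInBytes is exact for 16-bit PCM; the freeBufCount formula is an assumption.
static void deriveBufferSizes(sles_data *pSles) {
    pSles->bufSizeInBytes = pSles->bufSizeInFrames * pSles->channels * sizeof(SLint16);
    pSles->freeBufCount = pSles->rxBufCount + pSles->txBufCount;
}
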
+
+enum {
+    SLES_SUCCESS = 0,
+    SLES_FAIL = 1,
+} SLES_STATUS_ENUM;
+
+int slesInit(sles_data ** ppSles, int samplingRate, int frameCount, int micSource);
+
+// Note: double pointer so slesDestroy can free the structure's memory itself.
+int slesDestroy(sles_data ** ppSles);
+
+///full
+int slesFull(sles_data *pSles);
+
+int slesCreateServer(sles_data *pSles, int samplingRate, int frameCount, int micSource);
+int slesProcessNext(sles_data *pSles, double *pSamples, long maxSamples);
+int slesDestroyServer(sles_data *pSles);
+
+#ifdef __cplusplus
+}
+#endif
+#endif //_Included_org_drrickorang_loopback_sles
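Taken together, the header exposes a small lifecycle: slesInit sets everything up (the naming suggests it wraps slesCreateServer), slesProcessNext is polled for normalized samples, and slesDestroy takes a double pointer so it can free the structure itself. A hedged usage sketch with example parameter values:

// Hedged usage sketch; 48000 Hz, 512 frames and the VOICE_RECOGNITION preset
// are example values, not values mandated by this patch.
static void lifecycleExample(void) {
    sles_data *pSles = NULL;
    if (slesInit(&pSles, 48000, 512,
            SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION) == SLES_SUCCESS) {
        double samples[4096];
        int n = slesProcessNext(pSles, samples, 4096);
        // ... analyze samples[0..n) ...
        (void) n;
        slesDestroy(&pSles);  // double pointer lets slesDestroy free the struct
    }
}
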
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/Android.mk b/apps/CtsVerifier/jni/cameraanalyzer/Android.mk
deleted file mode 100644
index d595a20..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/Android.mk
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
-
-LOCAL_MODULE := libcameraanalyzer
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_SRC_FILES := com_android_cts_verifier_camera_analyzer_CameraTests.cpp \
-                com_android_cts_verifier_camera_analyzer_ColorCheckerTest.cpp \
-                com_android_cts_verifier_camera_analyzer_ExposureCompensationTest.cpp \
-                com_android_cts_verifier_camera_analyzer_AutoLockTest.cpp \
-                com_android_cts_verifier_camera_analyzer_MeteringTest.cpp \
-                com_android_cts_verifier_camera_analyzer_WhiteBalanceTest.cpp
-
-LOCAL_C_INCLUDES += $(LOCAL_PATH)/../../include/colorchecker $(JNI_H_INCLUDE)
-
-LOCAL_CXX_STL := libc++
-LOCAL_STATIC_LIBRARIES := libcolorchecker
-LOCAL_SHARED_LIBRARIES := \
-    libjnigraphics \
-    libcutils \
-    libutils \
-    liblog \
-
-include $(BUILD_SHARED_LIBRARY)
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_AutoLockTest.cpp b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_AutoLockTest.cpp
deleted file mode 100644
index fac39e1..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_AutoLockTest.cpp
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "AutoLockJNI"
-#include <utils/Log.h>
-#include "com_android_cts_verifier_camera_analyzer_AutoLockTest.h"
-
-#include <vector>
-#include <string>
-#include <string.h>
-
-#include "testingimage.h"
-#include "autolocktest.h"
-#include "vec2.h"
-#include "android/bitmap.h"
-
-jlong Java_com_android_cts_verifier_camera_analyzer_AutoLockTest_createAutoLockTest(
-        JNIEnv*      env,
-        jobject      thiz) {
-
-    AutoLockTest* testHandler = new AutoLockTest();
-    long handlerAddress = (long)testHandler;
-    return handlerAddress;
-}
-
-void Java_com_android_cts_verifier_camera_analyzer_AutoLockTest_createAutoLockClass(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputImageAddress,
-        jlong        inputHandlerAddress,
-        jlong        checkercenterAddress,
-        jlong        checkerradiusAddress) {
-
-    ALOGV("JNI createAutoLockClass starts!");
-    long imageAddress = (long)inputImageAddress;
-    long handlerAddress = (long)inputHandlerAddress;
-
-    TestingImage *image = (TestingImage*) imageAddress;
-    AutoLockTest *testHandler = (AutoLockTest*) handlerAddress;
-
-    std::vector<std::vector< Vec2f > >* checkerCenter =
-            (std::vector<std::vector< Vec2f > >*) (long) checkercenterAddress;
-    std::vector<std::vector< float > >* checkerRadius =
-            (std::vector<std::vector< float > >*) (long) checkerradiusAddress;
-    ALOGV("Classes recovered");
-
-    // Uses only the gray patches on the color checker for comparison.
-    testHandler->addDataToList(image->getColorChecker(3, 4, 0, 6,
-                                                      checkerCenter,
-                                                      checkerRadius));
-
-    delete image;
-}
-
-void Java_com_android_cts_verifier_camera_analyzer_AutoLockTest_processAutoLockTest(
-        JNIEnv*          env,
-        jobject          thiz,
-        jlong            inputHandlerAddress,
-        jbooleanArray    tempArray) {
-
-    ALOGV("Processing Auto Lock data!");
-
-    long handlerAddress = (long) inputHandlerAddress;
-    AutoLockTest *testHandler = (AutoLockTest*) handlerAddress;
-
-    testHandler->processData();
-
-    // Converts the native boolean array into a java boolean array.
-    const std::vector<bool>* nativeComparisonResults =
-            testHandler->getComparisonResults();
-    jboolean comparisonResults[nativeComparisonResults->size()];
-
-    for (int i = 0; i < nativeComparisonResults->size(); ++i) {
-        comparisonResults[i] = (jboolean) (*nativeComparisonResults)[i];
-    }
-
-    env->SetBooleanArrayRegion(tempArray,
-                               0, nativeComparisonResults->size(),
-                               comparisonResults);
-    testHandler->clearData();
-}
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_AutoLockTest.h b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_AutoLockTest.h
deleted file mode 100644
index dc40bc2..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_AutoLockTest.h
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef JNI_CAMERAANALYZER_AUTOLOCKTEST_H
-#define JNI_CAMERAANALYZER_AUTOLOCKTEST_H
-
-#include <jni.h>
-#include <stdio.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-JNIEXPORT jlong JNICALL
-Java_com_android_cts_verifier_camera_analyzer_AutoLockTest_createAutoLockTest(
-        JNIEnv*      env,
-        jobject      thiz);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_AutoLockTest_createAutoLockClass(
-        JNIEnv *env,
-        jobject thiz,
-        jlong inputImageAddress,
-        jlong inputHandlerAddress,
-        jlong checkercenterAddress,
-        jlong checkerradiusAddress);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_AutoLockTest_processAutoLockTest(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress,
-        jbooleanArray    tempArray);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_CameraTests.cpp b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_CameraTests.cpp
deleted file mode 100644
index ed91233..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_CameraTests.cpp
+++ /dev/null
@@ -1,223 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "CameraTestsJNI"
-#include <utils/Log.h>
-#include "com_android_cts_verifier_camera_analyzer_CameraTests.h"
-#include "android/bitmap.h"
-#include "testingimage.h"
-#include "imagetesthandler.h"
-
-#include <string.h>
-
-jlong Java_com_android_cts_verifier_camera_analyzer_CameraTests_findNative(
-        JNIEnv*      env,
-        jobject      thiz,
-        jobject      inputBitmap) {
-
-    ALOGV("JNI findNative starts!");
-
-    // Verify that we can handle the input bitmap
-    AndroidBitmapInfo inputInfo;
-    AndroidBitmap_getInfo(env, inputBitmap, &inputInfo);
-    if (inputInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888 &&
-        inputInfo.format != ANDROID_BITMAP_FORMAT_RGB_565) {
-        ALOGE("Only RGBA_8888 and RGB_565 bitmaps are supported, type was %d.",
-             inputInfo.format);
-    }
-
-    // Get some references to the fields and class type of ColorChecker
-    jclass thizCls = env->GetObjectClass(thiz);
-    ALOGV("ColorChecker field and classes reference finished!");
-
-    // Get raw inputs and outputs ready
-    uint8_t *inputBuffer = NULL;
-    int result = AndroidBitmap_lockPixels(
-            env,
-            inputBitmap,
-            reinterpret_cast<void**>(&inputBuffer));
-
-    if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
-        ALOGE("Unable to lock input bitmap");
-    }
-
-    uint8_t *outputImage = NULL;
-    int outputWidth, outputHeight;
-
-    ALOGV("Input and output images created!");
-
-    // Find the color checker
-    bool success;
-    uint8_t *inputBufferRGBA = NULL;
-    int inputStride;
-    bool freeInputRGBA = false;
-    switch (inputInfo.format) {
-        case ANDROID_BITMAP_FORMAT_RGB_565: {
-            // First convert to RGBA_8888
-            inputBufferRGBA = new uint8_t[inputInfo.width *
-                                          inputInfo.height *
-                                          4];
-            inputStride = inputInfo.width * 4;
-            uint8_t *outP = inputBufferRGBA;
-            for (int y = 0; y < inputInfo.height; y++ ) {
-                uint16_t *inP = (uint16_t*)(&inputBuffer[y * inputInfo.stride]);
-                for (int x = 0; x < inputInfo.width; x++) {
-                    *(outP++) = ( ((*inP) >> 0) & 0x001F) << 3;
-                    *(outP++) = ( ((*inP) >> 5) & 0x003F) << 2;
-                    *(outP++) = ( ((*inP) >> 11) & 0x001F) << 3;
-                    outP++;
-                    inP++;
-                }
-            }
-            freeInputRGBA = true;
-
-            ALOGV("RGB_565 Format with width, height and stride as %d, %d, %d",
-                 inputInfo.width, inputInfo.height, inputStride);
-            break;
-        }
-        case ANDROID_BITMAP_FORMAT_RGBA_8888: {
-            // Already in RGBA
-            inputBufferRGBA = inputBuffer;
-            inputStride = inputInfo.stride;
-            ALOGV("RGB_8888 Format with width, height and stride as %d, %d, %d",
-                 inputInfo.width, inputInfo.height, inputStride);
-            break;
-        }
-    }
-
-    TestingImage *input_testing_image =
-            new TestingImage(inputBufferRGBA, inputInfo.height, inputInfo.width,
-                             4, inputStride, 120, 160);
-
-    long lp = (long)input_testing_image;
-
-    result = AndroidBitmap_unlockPixels(env, inputBitmap);
-    if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
-        ALOGE("Unable to unlock input bitmap");
-    }
-
-    if (freeInputRGBA) {
-        ALOGV("Deleteing inputbufferRGBA");
-        delete[] inputBufferRGBA;
-    }
-
-    return lp;
-    ALOGV("Input format switched!");
-}
-
-jlong Java_com_android_cts_verifier_camera_analyzer_CameraTests_createImageTestHandler(
-        JNIEnv*      env,
-        jobject      thiz,
-        jint         debugHeight,
-        jint         debugWidth) {
-
-    ImageTestHandler* testHandler =
-            new ImageTestHandler(debugHeight, debugWidth);
-    long handlerAddress = (long)testHandler;
-    return handlerAddress;
-}
-
-void Java_com_android_cts_verifier_camera_analyzer_CameraTests_cleanUpHandler(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress) {
-
-    ImageTestHandler* testHandler = (ImageTestHandler*) (long) inputHandlerAddress;
-    delete testHandler;
-}
-
-void Java_com_android_cts_verifier_camera_analyzer_CameraTests_displayHandlerDebugOutput(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress) {
-
-    jclass thizCls = env->GetObjectClass(thiz);
-    jfieldID outputId = env->GetFieldID(thizCls, "mDebugOutput",
-                                        "Landroid/graphics/Bitmap;");
-
-    ImageTestHandler* testHandler = (ImageTestHandler*) (long) inputHandlerAddress;
-    uint8_t *outputImage =  new uint8_t[testHandler->getDebugHeight() *
-                                        testHandler->getDebugWidth() * 4];
-
-    const unsigned char *debugoutput = testHandler->debug_output();
-    memcpy(outputImage, debugoutput, testHandler->getDebugHeight() *
-            testHandler->getDebugWidth() * 4);
-
-    int outputWidth = testHandler->getDebugWidth();
-    int outputHeight = testHandler->getDebugHeight();
-    bool success = false;
-
-    if (outputImage == NULL) {
-        ALOGV("output Image is null!");
-    } else {
-        ALOGV("output Image is ready!");
-    }
-
-    // Create debug bitmap from output image data
-    if (outputImage != NULL) {
-        // Get method handles, create inputs to createBitmap
-        jclass bitmapClass =
-                env->FindClass("android/graphics/Bitmap");
-        jclass bitmapConfigClass =
-                env->FindClass("android/graphics/Bitmap$Config");
-
-        jmethodID createBitmap = env->GetStaticMethodID(
-            bitmapClass, "createBitmap",
-            "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
-
-        jmethodID getConfig = env->GetStaticMethodID(
-            bitmapConfigClass, "valueOf",
-            "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;");
-
-        // Create bitmap config and bitmap
-        jstring bitmapConfigValue = env->NewStringUTF("ARGB_8888");
-        jobject rgbaConfig = env->CallStaticObjectMethod(bitmapConfigClass,
-                                                         getConfig,
-                                                         bitmapConfigValue);
-        jobject outputBitmap = env->CallStaticObjectMethod(bitmapClass,
-                                                           createBitmap,
-                                                           outputWidth,
-                                                           outputHeight,
-                                                           rgbaConfig);
-        // Copy output image into it
-        uint8_t *outputBuffer;
-        int result = AndroidBitmap_lockPixels(
-                env,
-                outputBitmap,
-                reinterpret_cast<void**>(&outputBuffer) );
-
-        if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
-            ALOGE("Unable to lock output bitmap");
-        }
-
-        memcpy(outputBuffer, outputImage, outputWidth * outputHeight * 4);
-
-        result = AndroidBitmap_unlockPixels(env, outputBitmap);
-        if (result != ANDROID_BITMAP_RESULT_SUCCESS) {
-            ALOGE("Unable to unlock output bitmap");
-        }
-
-        // Write new Bitmap reference into mDebugOutput class member
-        env->SetObjectField(thiz, outputId, outputBitmap);
-        ALOGV("Copied to outputBitmap");
-        delete [] outputImage;
-        env->DeleteLocalRef(outputBitmap);
-        env->DeleteLocalRef(rgbaConfig);
-        env->DeleteLocalRef(bitmapClass);
-        env->DeleteLocalRef(bitmapConfigClass);
-    }
-}
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_CameraTests.h b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_CameraTests.h
deleted file mode 100644
index e071dc1..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_CameraTests.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef JNI_CAMERAANALYZER_CAMERATESTS_H
-#define JNI_CAMERAANALYZER_CAMERATESTS_H
-
-#include <jni.h>
-#include <stdio.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-JNIEXPORT jlong JNICALL
-Java_com_android_cts_verifier_camera_analyzer_CameraTests_findNative(
-        JNIEnv *env,
-        jobject thiz,
-        jobject inputBitmap);
-
-JNIEXPORT jlong JNICALL
-Java_com_android_cts_verifier_camera_analyzer_CameraTests_createImageTestHandler(
-        JNIEnv*      env,
-        jobject      thiz,
-        jint         debugHeight,
-        jint         debugWidth);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_CameraTests_cleanUpHandler(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_CameraTests_displayHandlerDebugOutput(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress);
-
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ColorCheckerTest.cpp b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ColorCheckerTest.cpp
deleted file mode 100644
index 94e3ac2..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ColorCheckerTest.cpp
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "FindColorCheckerJNI"
-#include <utils/Log.h>
-#include "com_android_cts_verifier_camera_analyzer_ColorCheckerTest.h"
-
-#include <string.h>
-#include "android/bitmap.h"
-#include "colorcheckertest.h"
-#include "testingimage.h"
-
-jlong Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_createColorCheckerTest(
-        JNIEnv*      env,
-        jobject      thiz,
-        jint         debugHeight,
-        jint         debugWidth) {
-    ColorCheckerTest* testHandler = new ColorCheckerTest(debugHeight,
-                                                         debugWidth);
-    long testHandlerAddress = (long)testHandler;
-    return testHandlerAddress;
-}
-
-void Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_createColorCheckerClass(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputImageAddress,
-        jlong        inputHandlerAddress) {
-    ALOGV("JNI createColorCheckerClass starts!");
-
-    TestingImage *testImage = (TestingImage*) (long) inputImageAddress;
-    ColorCheckerTest *testHandler = (ColorCheckerTest*)
-            (long) inputHandlerAddress;
-
-    testHandler->addTestingImage(testImage);
-}
-
-jboolean Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_processColorCheckerTest(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress) {
-
-    ColorCheckerTest *testHandler = (ColorCheckerTest*)
-            (long) inputHandlerAddress;
-    testHandler->processData();
-    return testHandler->getSuccess();
-}
-
-jlong Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_getColorCheckerRadiusAdd(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress) {
-
-    ColorCheckerTest *testHandler = (ColorCheckerTest*)
-            (long) inputHandlerAddress;
-    long rtn = (long) testHandler->getCheckerRadiusAdd();
-    return rtn;
-}
-
-jlong Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_getColorCheckerCenterAdd(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress) {
-
-    ColorCheckerTest *testHandler = (ColorCheckerTest*)
-            (long) inputHandlerAddress;
-
-    long rtn = (long) testHandler->getCheckerCenterAdd();
-    return rtn;
-}
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ColorCheckerTest.h b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ColorCheckerTest.h
deleted file mode 100644
index fb87735..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ColorCheckerTest.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef JNI_CAMERAANALYZER_COLORCHECKERTEST_H
-#define JNI_CAMERAANALYZER_COLORCHECKERTEST_H
-
-#include <jni.h>
-#include <stdio.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-JNIEXPORT jlong JNICALL
-Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_createColorCheckerTest(
-    JNIEnv*      env,
-    jobject      thiz,
-    jint         output_height,
-    jint         output_width);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_createColorCheckerClass(
-    JNIEnv *env,
-    jobject thiz,
-    jlong buffer_address,
-    jlong handler_address);
-
-JNIEXPORT jlong JNICALL
-Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_getColorCheckerRadiusAdd(
-    JNIEnv *env,
-    jobject thiz,
-    jlong handler_address);
-
-JNIEXPORT jlong JNICALL
-Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_getColorCheckerCenterAdd(
-    JNIEnv *env,
-    jobject thiz,
-    jlong handler_address);
-
-JNIEXPORT jboolean JNICALL
-Java_com_android_cts_verifier_camera_analyzer_ColorCheckerTest_processColorCheckerTest(
-    JNIEnv*      env,
-    jobject      thiz,
-    jlong        handler_address);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ExposureCompensationTest.cpp b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ExposureCompensationTest.cpp
deleted file mode 100644
index 0224639..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ExposureCompensationTest.cpp
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "ExposureCompensationJNI"
-#include <utils/Log.h>
-#include <vector>
-#include <string.h>
-
-#include "android/bitmap.h"
-#include "testingimage.h"
-#include "exposurecompensationtest.h"
-#include "vec2.h"
-
-#include "com_android_cts_verifier_camera_analyzer_ExposureCompensationTest.h"
-
-jlong Java_com_android_cts_verifier_camera_analyzer_ExposureCompensationTest_createExposureCompensationTest(
-      JNIEnv*      env,
-      jobject      thiz,
-      jint         debugHeight,
-      jint         debugWidth) {
-
-    ExposureCompensationTest* testHandler =
-            new ExposureCompensationTest(debugHeight, debugWidth);
-    long handlerAddress = (long)testHandler;
-
-    return handlerAddress;
-}
-
-void Java_com_android_cts_verifier_camera_analyzer_ExposureCompensationTest_createExposureCompensationClass(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputImageAddress,
-        jlong        inputHandlerAddress,
-        jlong        checkerCenterAddress,
-        jlong        checkerRadiusAddress,
-        jfloat       exposureValue) {
-
-    ALOGV("JNI createExposureCompensationClass starts!");
-
-    long imageAddress = (long)inputImageAddress;
-    long handlerAddress = (long)inputHandlerAddress;
-
-    TestingImage *inputImage = (TestingImage*) imageAddress;
-    ExposureCompensationTest *testHandler =
-            (ExposureCompensationTest*) handlerAddress;
-
-    std::vector<std::vector< Vec2f > >* checkerCenter =
-            (std::vector<std::vector< Vec2f > >*) (long) checkerCenterAddress;
-    std::vector<std::vector< float > >* checkerRadius =
-            (std::vector<std::vector< float > >*) (long) checkerRadiusAddress;
-
-    const std::vector<Vec3f>* checkerValue =
-            inputImage->getColorChecker(3, 4, 0, 6,
-                                        checkerCenter, checkerRadius);
-    testHandler->addDataToList((float) exposureValue, checkerValue);
-    delete inputImage;
-    delete checkerValue;
-}
-
-jstring Java_com_android_cts_verifier_camera_analyzer_ExposureCompensationTest_processExposureCompensationTest(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress) {
-
-    long handlerAddress = (long) inputHandlerAddress;
-    ExposureCompensationTest *testHandler =
-            (ExposureCompensationTest*) handlerAddress;
-
-    testHandler->processData();
-
-    const char* nativeDebugText = testHandler->getDebugText();
-    ALOGV("%s", nativeDebugText);
-    return env->NewStringUTF(nativeDebugText);
-}
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ExposureCompensationTest.h b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ExposureCompensationTest.h
deleted file mode 100644
index 8e8761d..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_ExposureCompensationTest.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef JNI_CAMERAANALYZER_EXPOSURECOMPENSATIONTEST_H
-#define JNI_CAMERAANALYZER_EXPOSURECOMPENSATIONTEST_H
-
-#include <jni.h>
-#include <stdio.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-JNIEXPORT jlong JNICALL
-Java_com_android_cts_verifier_camera_analyzer_ExposureCompensationTest_createExposureCompensationTest(
-        JNIEnv*      env,
-        jobject      thiz,
-        jint         debugHeight,
-        jint         debugWidth);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_ExposureCompensationTest_createExposureCompensationClass(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputImageAddress,
-        jlong        inputHandlerAddress,
-        jlong        checkerCenterAddress,
-        jlong        checkerRadiusAddress,
-        jfloat       exposureValue);
-
-
-JNIEXPORT jstring JNICALL
-Java_com_android_cts_verifier_camera_analyzer_ExposureCompensationTest_processExposureCompensationTest(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputHandlerAddress);
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_MeteringTest.cpp b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_MeteringTest.cpp
deleted file mode 100644
index faebe21..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_MeteringTest.cpp
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "MeteringJNI"
-#include <utils/Log.h>
-#include "com_android_cts_verifier_camera_analyzer_MeteringTest.h"
-
-#include <vector>
-#include <string>
-#include <string.h>
-#include <math.h>
-
-#include "testingimage.h"
-#include "meteringtest.h"
-#include "vec2.h"
-#include "android/bitmap.h"
-
-jlong Java_com_android_cts_verifier_camera_analyzer_MeteringTest_createMeteringTest(
-        JNIEnv*      env,
-        jobject      thiz) {
-
-    MeteringTest* testHandler = new MeteringTest();
-    long handlerAddress = (long)testHandler;
-    return handlerAddress;
-}
-
-void Java_com_android_cts_verifier_camera_analyzer_MeteringTest_createMeteringClass(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputImageAddress,
-        jlong        inputHandlerAddress,
-        jlong        checkercenterAddress,
-        jlong        checkerradiusAddress){
-
-    ALOGV("JNI createMeteringClass starts!");
-    long imageAddress = (long)inputImageAddress;
-    long handlerAddress = (long)inputHandlerAddress;
-
-    TestingImage *image = (TestingImage*) imageAddress;
-    MeteringTest *testHandler = (MeteringTest*) handlerAddress;
-
-    std::vector<std::vector< Vec2f > >* checkerCenter =
-            (std::vector<std::vector< Vec2f > >*) (long) checkercenterAddress;
-    std::vector<std::vector< float > >* checkerRadius =
-            (std::vector<std::vector< float > >*) (long) checkerradiusAddress;
-    ALOGV("Classes recovered");
-
-    testHandler->addDataToList(image->getColorChecker(3, 4, 0, 6,
-                                                      checkerCenter,
-                                                      checkerRadius));
-
-    delete image;
-}
-
-void Java_com_android_cts_verifier_camera_analyzer_MeteringTest_processMeteringTest(
-        JNIEnv*          env,
-        jobject          thiz,
-        jlong            inputHandlerAddress,
-        jbooleanArray    tempArray) {
-
-    ALOGV("Processing Auto Lock data!");
-
-    long handlerAddress = (long) inputHandlerAddress;
-    MeteringTest *testHandler = (MeteringTest*) handlerAddress;
-
-    testHandler->processData();
-
-    const std::vector<bool>* nativeComparisonResults =
-            testHandler->getComparisonResults();
-    jboolean jComparisonResults[nativeComparisonResults->size()];
-
-    for (int i = 0; i < nativeComparisonResults->size(); ++i) {
-        jComparisonResults[i] = (jboolean) (*nativeComparisonResults)[i];
-    }
-
-    env->SetBooleanArrayRegion(tempArray,
-                               0, nativeComparisonResults->size(),
-                               jComparisonResults);
-    testHandler->clearData();
-}
-
-// Find the gray checker borders from the native array of checker center and
-// radius. Convert the coordinate to the coordinates accepted by Android
-// Camera.Area type, which defines the top left corner to (-1000, -1000) and
-// bottom right corner to (1000, 1000).
-void Java_com_android_cts_verifier_camera_analyzer_MeteringTest_findGreyCoordinates(
-        JNIEnv*      env,
-        jobject      thiz,
-        jintArray    greyCoordinates,
-        jlong        checkercenterAddress,
-        jlong        checkerradiusAddress){
-
-    ALOGV("Start finding grey coordinates");
-
-    std::vector<std::vector< Vec2f > >* checkerCenter =
-            (std::vector<std::vector< Vec2f > >*) (long) checkercenterAddress;
-    std::vector<std::vector< float > >* checkerRadius =
-            (std::vector<std::vector< float > >*) (long) checkerradiusAddress;
-
-    ALOGV("Checker recovered!");
-    int nativeGreyCoordinates[24];
-
-    for (int i = 0; i < 6; ++i) {
-        float radius = sqrt((*checkerRadius)[3][i]);
-        nativeGreyCoordinates[i * 4] = static_cast<int>(
-                ((*checkerCenter)[3][i].y() - radius)
-                / 160.0 * 2000.0 - 1000.0);
-        nativeGreyCoordinates[i * 4 + 1] = static_cast<int>(
-                ((*checkerCenter)[3][i].x() - radius)
-                / 120.0 * 2000.0 - 1000.0);
-        nativeGreyCoordinates[i * 4 + 2] = static_cast<int>(
-                ((*checkerCenter)[3][i].y() + radius)
-                / 160.0 * 2000.0 - 1000.0);
-        nativeGreyCoordinates[i * 4 + 3] = static_cast<int>(
-                ((*checkerCenter)[3][i].x() + radius)
-                / 120.0 * 2000.0 - 1000.0);
-
-        ALOGV("checker is bounded by %f, %f, %f",
-             (*checkerCenter)[3][i].x(), (*checkerCenter)[3][i].y(), radius);
-
-        ALOGV("Square is bounded by %d, %d, %d, %d",
-             nativeGreyCoordinates[i * 4], nativeGreyCoordinates[i * 4 + 1],
-             nativeGreyCoordinates[i * 4 + 2],
-             nativeGreyCoordinates[i * 4 + 3]);
-    }
-
-    env->SetIntArrayRegion(greyCoordinates, 0, 24, nativeGreyCoordinates);
-}
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_MeteringTest.h b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_MeteringTest.h
deleted file mode 100644
index ecc1b96..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_MeteringTest.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef JNI_CAMERAANALYZER_METERINGTEST_H
-#define JNI_CAMERAANALYZER_METERINGTEST_H
-
-#include <jni.h>
-#include <stdio.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-JNIEXPORT jlong JNICALL
-Java_com_android_cts_verifier_camera_analyzer_MeteringTest_createMeteringTest(
-        JNIEnv*      env,
-        jobject      thiz);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_MeteringTest_createMeteringClass(
-        JNIEnv *env,
-        jobject thiz,
-        jlong inputAddress,
-        jlong handlerAddress,
-        jlong checkercenterAddress,
-        jlong checkerradiusAddress);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_MeteringTest_processMeteringTest(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        handlerAddress,
-        jbooleanArray    tempArray);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_MeteringTest_findGreyCoordinates(
-        JNIEnv*      env,
-        jobject      thiz,
-        jintArray    greyCoordinates,
-        jlong        checkercenterAddress,
-        jlong        checkerradiusAddress);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_WhiteBalanceTest.cpp b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_WhiteBalanceTest.cpp
deleted file mode 100644
index bce0fca..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_WhiteBalanceTest.cpp
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "WhiteBalanceJNI"
-#include <utils/Log.h>
-#include "com_android_cts_verifier_camera_analyzer_WhiteBalanceTest.h"
-
-#include <vector>
-#include <string>
-#include <string.h>
-
-#include "testingimage.h"
-#include "whitebalancetest.h"
-#include "vec2.h"
-#include "android/bitmap.h"
-
-jlong Java_com_android_cts_verifier_camera_analyzer_WhiteBalanceTest_createWhiteBalanceTest(
-        JNIEnv*      env,
-        jobject      thiz) {
-
-    WhiteBalanceTest* testHandler = new WhiteBalanceTest();
-    long handlerAddress = (long)testHandler;
-    return handlerAddress;
-}
-
-void Java_com_android_cts_verifier_camera_analyzer_WhiteBalanceTest_createWhiteBalanceClass(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        inputImageAddress,
-        jlong        inputHandlerAddress,
-        jlong        checkercenterAddress,
-        jlong        checkerradiusAddress,
-        jstring      whiteBalance){
-
-    ALOGV("JNI createWhiteBalanceClass starts!");
-    long imageAddress = (long)inputImageAddress;
-    long handlerAddress = (long)inputHandlerAddress;
-
-    TestingImage *image = (TestingImage*) imageAddress;
-    WhiteBalanceTest *testHandler = (WhiteBalanceTest*) handlerAddress;
-
-    std::vector<std::vector< Vec2f > >* checkerCenter =
-        (std::vector<std::vector< Vec2f > >*) (long) checkercenterAddress;
-    std::vector<std::vector< float > >* checkerRadius =
-        (std::vector<std::vector< float > >*) (long) checkerradiusAddress;
-    ALOGV("Classes recovered");
-
-    jboolean isCopy;
-    const char* stringWhiteBalance =
-            env->GetStringUTFChars(whiteBalance, &isCopy);
-    ALOGV("White Balance is %s", stringWhiteBalance);
-
-    // Adds the gray checker's RGB values to the test handler.
-    testHandler->addDataToList(stringWhiteBalance,
-                               image->getColorChecker(3, 4, 0, 6,
-                                                      checkerCenter,
-                                                      checkerRadius));
-
-    env->ReleaseStringUTFChars(whiteBalance, stringWhiteBalance);
-    delete image;
-}
-
-jint Java_com_android_cts_verifier_camera_analyzer_WhiteBalanceTest_processWhiteBalanceTest(
-    JNIEnv*      env,
-    jobject      thiz,
-    jlong        inputHandlerAddress) {
-  ALOGV("Processing white balance test");
-
-  long handlerAddress = (long) inputHandlerAddress;
-  WhiteBalanceTest *testHandler = (WhiteBalanceTest*) handlerAddress;
-
-  testHandler->processData();
-
-  ALOGV("CCT is %d", testHandler->getCorrelatedColorTemp());
-  return testHandler->getCorrelatedColorTemp();
-}
diff --git a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_WhiteBalanceTest.h b/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_WhiteBalanceTest.h
deleted file mode 100644
index 88cf52e..0000000
--- a/apps/CtsVerifier/jni/cameraanalyzer/com_android_cts_verifier_camera_analyzer_WhiteBalanceTest.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef JNI_CAMERAANALYZER_WHITEBALANCETEST_H
-#define JNI_CAMERAANALYZER_WHITEBALANCETEST_H
-
-#include <jni.h>
-#include <stdio.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-JNIEXPORT jlong JNICALL
-Java_com_android_cts_verifier_camera_analyzer_WhiteBalanceTest_createWhiteBalanceTest(
-        JNIEnv*      env,
-        jobject      thiz);
-
-JNIEXPORT void JNICALL
-Java_com_android_cts_verifier_camera_analyzer_WhiteBalanceTest_createWhiteBalanceClass(
-        JNIEnv *env,
-        jobject thiz,
-        jlong inputAddress,
-        jlong handlerAddress,
-        jlong checkercenterAddress,
-        jlong checkerradiusAddress,
-        jstring whiteBalance);
-
-JNIEXPORT jint JNICALL
-Java_com_android_cts_verifier_camera_analyzer_WhiteBalanceTest_processWhiteBalanceTest(
-        JNIEnv*      env,
-        jobject      thiz,
-        jlong        handlerAddress);
-
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/apps/CtsVerifier/lib/Android.mk b/apps/CtsVerifier/lib/Android.mk
deleted file mode 100644
index 56a3fa8..0000000
--- a/apps/CtsVerifier/lib/Android.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include $(call all-subdir-makefiles)
diff --git a/apps/CtsVerifier/lib/colorchecker/Android.mk b/apps/CtsVerifier/lib/colorchecker/Android.mk
deleted file mode 100644
index 48f1356..0000000
--- a/apps/CtsVerifier/lib/colorchecker/Android.mk
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-LOCAL_PATH := $(call my-dir)
-
-#####################
-# Build image analysis library
-
-include $(CLEAR_VARS)
-LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
-
-LOCAL_MODULE_TAGS := optional
-LOCAL_MODULE := libcolorchecker
-
-LOCAL_SRC_FILES += testingimage.cpp \
-                   vec3.cpp \
-                   vec2.cpp \
-                   imagetesthandler.cpp \
-                   colorcheckertest.cpp \
-                   exposurecompensationtest.cpp \
-                   autolocktest.cpp \
-                   meteringtest.cpp \
-                   whitebalancetest.cpp
-
-LOCAL_C_INCLUDES += $(LOCAL_PATH)/../../include/colorchecker
-LOCAL_CXX_STL := libc++
-LOCAL_SHARED_LIBRARIES := \
-    libcutils \
-    libutils \
-
-include $(BUILD_STATIC_LIBRARY)
diff --git a/apps/CtsVerifier/lib/colorchecker/autolocktest.cpp b/apps/CtsVerifier/lib/colorchecker/autolocktest.cpp
deleted file mode 100644
index 6bfa922..0000000
--- a/apps/CtsVerifier/lib/colorchecker/autolocktest.cpp
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "AutoLockTest"
-#include <utils/Log.h>
-#include <utils/Timers.h>
-#include <cmath>
-#include <string>
-
-#include "vec2.h"
-#include "vec3.h"
-#include "autolocktest.h"
-
-const float kOverExposure = 230.f;
-const float kEqThreshold = 0.05f;
-// Processes the color checker values and compares the two values from
-// the same individual test.
-void AutoLockTest::processData() {
-    ALOGV("Start Processing Auto Lock Test Data!");
-
-    int numTests = mCheckerColors.size() / 2;
-    mNumPatches = 0;
-
-    if (numTests > 0) {
-        mNumPatches = mCheckerColors[0].size();
-    }
-
-    for (int i = 0; i < numTests; ++i) {
-        mComparisonResults.push_back(
-                IsBrighterThan((&mCheckerColors[i * 2]),
-                               (&mCheckerColors[i * 2 + 1])));
-        mComparisonResults.push_back(
-                IsEquivalentTo((&mCheckerColors[i * 2]),
-                               (&mCheckerColors[i * 2 + 1])));
-    }
-}
-
-// Compares whether one array of gray color patches is brighter than
-// another one.
-bool AutoLockTest::IsBrighterThan(
-        const std::vector<Vec3f>* colorCheckers1,
-        const std::vector<Vec3f>* colorCheckers2) const {
-    float meanRatio = 0.f;
-    int meanNumCount = 0;
-
-    for (int i = 0; i < mNumPatches; ++i) {
-        float luminance1 = (*colorCheckers1)[i].convertToLuminance();
-        float luminance2 = (*colorCheckers2)[i].convertToLuminance();
-
-        // Consider a 5% rise as a considerable increase.
-        if ((luminance1 < kOverExposure) && (luminance2 != 0.f)) {
-            meanRatio += luminance1 / luminance2;
-            ++meanNumCount;
-        }
-    }
-    meanRatio = meanRatio / meanNumCount;
-
-    return (meanRatio > 1 + kEqThreshold);
-}
-
-// Compares whether one array of gray color patches is within a small range
-// of the other one to be considered equivalent.
-bool AutoLockTest::IsEquivalentTo(
-        const std::vector<Vec3f>* colorCheckers1,
-        const std::vector<Vec3f>* colorCheckers2) const {
-    float meanRatio = 0.f;
-    int meanNumCount = 0;
-
-    for (int i = 0; i < mNumPatches; ++i) {
-        float luminance1 = (*colorCheckers1)[i].convertToLuminance();
-        float luminance2 = (*colorCheckers2)[i].convertToLuminance();
-        ALOGV("Luma_1 and Luma_2 is %f, %f", luminance1, luminance2);
-
-        if ((luminance1 < kOverExposure) && (luminance2 < kOverExposure)) {
-              meanRatio += luminance2 / luminance1;
-              ++meanNumCount;
-        }
-    }
-    meanRatio = meanRatio / meanNumCount;
-
-    return ((meanRatio >= 1 - kEqThreshold) && (meanRatio <= 1 + kEqThreshold));
-}
-
-void AutoLockTest::clearData() {
-    mCheckerColors.clear();
-    mComparisonResults.clear();
-}
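
Both comparisons above reduce a pair of captures to the mean, per-patch luminance ratio and test it against a 5% band, skipping saturated patches. A self-contained sketch of that ratio test follows; the luminance() helper, with Rec. 601 weights, is an assumption standing in for the Vec3f::convertToLuminance() used above, and the guard against an empty sample is added here for safety.

#include <cstddef>
#include <vector>

struct Rgb { float r, g, b; };

// Assumed luminance conversion (Rec. 601 weights); the removed library's
// convertToLuminance() may differ.
static float luminance(const Rgb& c) {
    return 0.299f * c.r + 0.587f * c.g + 0.114f * c.b;
}

// Returns true when the first patch set is, on average, more than 5% brighter
// than the second, ignoring over-exposed or black patches.
bool isBrighterThan(const std::vector<Rgb>& a, const std::vector<Rgb>& b) {
    const float kOverExposure = 230.f;
    const float kEqThreshold = 0.05f;
    float ratioSum = 0.f;
    int count = 0;
    for (std::size_t i = 0; i < a.size() && i < b.size(); ++i) {
        const float la = luminance(a[i]);
        const float lb = luminance(b[i]);
        if (la < kOverExposure && lb != 0.f) {
            ratioSum += la / lb;
            ++count;
        }
    }
    return count > 0 && (ratioSum / count) > 1.f + kEqThreshold;
}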
diff --git a/apps/CtsVerifier/lib/colorchecker/colorcheckertest.cpp b/apps/CtsVerifier/lib/colorchecker/colorcheckertest.cpp
deleted file mode 100644
index ef7d2c6..0000000
--- a/apps/CtsVerifier/lib/colorchecker/colorcheckertest.cpp
+++ /dev/null
@@ -1,979 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "ColorCheckerTest"
-#include <utils/Log.h>
-#include <utils/Timers.h>
-#include <cmath>
-#include <string>
-
-#include "vec2.h"
-#include "vec3.h"
-#include "colorcheckertest.h"
-
-const float GAMMA_CORRECTION = 2.2f;
-const float COLOR_ERROR_THRESHOLD = 200.f;
-ColorCheckerTest::~ColorCheckerTest() {
-    ALOGV("Deleting color checker test handler");
-
-    if (mImage != NULL) {
-        delete mImage;
-    }
-    ALOGV("Image deleted");
-
-    int numHorizontalLines = mCandidateColors.size();
-    int numVerticalLines = mCandidateColors[0].size();
-
-    for (int i = 0; i < numHorizontalLines; ++i) {
-        for (int j = 0; j < numVerticalLines; ++j) {
-            if (mCandidateColors[i][j] != NULL) {
-                delete mCandidateColors[i][j];
-            }
-            if (mCandidatePositions[i][j] != NULL) {
-                delete mCandidatePositions[i][j];
-            }
-        }
-    }
-    ALOGV("Candidates deleted!");
-
-    for (int i = 0; i < 4; ++i) {
-        for (int j = 0; j < 6; ++j) {
-            if (mMatchPositions[i][j] != NULL) {
-                delete mMatchPositions[i][j];
-            }
-            if (mReferenceColors[i][j] != NULL) {
-                delete mReferenceColors[i][j];
-            }
-            if (mMatchColors[i][j] != NULL) {
-                delete mMatchColors[i][j];
-            }
-        }
-    }
-}
-
-// Adds a new image to the test handler.
-void ColorCheckerTest::addTestingImage(TestingImage* inputImage) {
-    if (mImage != NULL) {
-        delete mImage;
-    }
-    mImage = NULL;
-    ALOGV("Original image deleted");
-    mImage = inputImage;
-
-    if ((mImage->getHeight() == getDebugHeight()) &&
-        (mImage->getWidth() == getDebugWidth())) {
-        copyDebugImage(getDebugHeight(), getDebugWidth(), mImage->getImage());
-    }
-}
-
-void ColorCheckerTest::processData() {
-    mSuccess = false;
-    initializeRefColor();
-    edgeDetection();
-}
-
-void ColorCheckerTest::initializeRefColor() {
-    mReferenceColors.resize(4, std::vector<Vec3i*>(6, NULL));
-    mMatchPositions.resize(4, std::vector<Vec2f*>(6, NULL));
-    mMatchColors.resize(4, std::vector<Vec3f*>(6, NULL));
-    mMatchRadius.resize(4, std::vector<float>(6, 0.f));
-
-    mReferenceColors[0][0]= new Vec3i(115, 82, 68);
-    mReferenceColors[0][1]= new Vec3i(194, 150, 130);
-    mReferenceColors[0][2]= new Vec3i(98, 122, 157);
-    mReferenceColors[0][3]= new Vec3i(87, 108, 67);
-    mReferenceColors[0][4]= new Vec3i(133, 128, 177);
-    mReferenceColors[0][5]= new Vec3i(103, 189, 170);
-    mReferenceColors[1][0]= new Vec3i(214, 126, 44);
-    mReferenceColors[1][1]= new Vec3i(80, 91, 166);
-    mReferenceColors[1][2]= new Vec3i(193, 90, 99);
-    mReferenceColors[1][3]= new Vec3i(94,  60, 108);
-    mReferenceColors[1][4]= new Vec3i(157, 188, 64);
-    mReferenceColors[1][5]= new Vec3i(224, 163, 46);
-    mReferenceColors[2][0]= new Vec3i(56, 61, 150);
-    mReferenceColors[2][1]= new Vec3i(70, 148, 73);
-    mReferenceColors[2][2]= new Vec3i(175, 54, 60);
-    mReferenceColors[2][3]= new Vec3i(231, 199, 31);
-    mReferenceColors[2][4]= new Vec3i(187, 86, 149);
-    mReferenceColors[2][5]= new Vec3i(8, 133, 161);
-    mReferenceColors[3][0]= new Vec3i(243, 243, 242);
-    mReferenceColors[3][1]= new Vec3i(200, 200, 200);
-    mReferenceColors[3][2]= new Vec3i(160, 160, 160);
-    mReferenceColors[3][3]= new Vec3i(122, 122, 121);
-    mReferenceColors[3][4]= new Vec3i(85, 85, 85);
-    mReferenceColors[3][5]= new Vec3i(52, 52, 52);
-}
-
-void ColorCheckerTest::edgeDetection() {
-    int width = mImage->getWidth();
-    int height = mImage->getHeight();
-
-    bool* edgeMap = new bool[height * width];
-    unsigned char* grayImage = new unsigned char[height * width];
-
-    // If the image is a color image and can be converted to a luminance layer
-    if (mImage->rgbToGrayScale(grayImage)) {
-        float* gradientMap = new float[height * width * 2];
-
-        // Computes the gradient image on the luminance layer.
-        computeGradient(grayImage, gradientMap);
-
-        float* gradientMagnitude = new float[height * width];
-        int* gradientDirectionInt = new int[height * width];
-        float* gradientDirection = new float[height * width];
-
-        // Computes the absolute gradient of the image without padding.
-        for (int i = 1; i < height - 1; ++i) {
-            for (int j = 1; j < width - 1; ++j) {
-                gradientMagnitude[i * width + j] =
-                        sqrt(gradientMap[(i * width + j) * 2] *
-                             gradientMap[(i * width + j) * 2] +
-                             gradientMap[(i * width + j ) * 2 + 1] *
-                             gradientMap[(i * width + j ) * 2 + 1]);
-
-                // Computes the gradient direction of the image.
-                if (gradientMap[(i * width + j) * 2] == 0 ) {
-                    // If the horizontal gradient is 0, the edge is horizontal.
-                    // Mark the gradient direction as 90 degrees.
-                    gradientDirectionInt[i * width + j] = 2;
-                    gradientDirection[i * width + j] = 90.0f;
-                } else {
-                    // Otherwise the atan operation is valid and can determine
-                    // the gradient direction of the edge.
-                    float gradient = atan(gradientMap[(i * width + j) * 2 + 1]
-                            / gradientMap[(i * width + j) * 2])
-                            / (M_PI / 4);
-
-                    gradientDirection[i * width + j] = gradient * 45.0f;
-
-                    // Maps the gradient direction to 4 major directions with
-                    // 0 mapped to up and 2 mapped to right.
-                    if (gradient - floor(gradient) > 0.5) {
-                        gradientDirectionInt[i * width + j] =
-                                (static_cast<int>(ceil(gradient)) + 4) % 4;
-                    } else {
-                        gradientDirectionInt[i * width + j] =
-                                (static_cast<int>(floor(gradient)) + 4) % 4;
-                    }
-                }
-            }
-        }
-
-        // Computes a boolean map showing whether each pixel is on an edge.
-        for (int i = 1; i < height - 1; ++i) {
-            for (int j = 1; j < width - 1; ++j) {
-                edgeMap[i * width + j] = false;
-
-                switch (gradientDirectionInt[i * width + j]) {
-                    case 0:
-                        // If the gradient points rightwards, the pixel is
-                        // on an edge if it has a larger absolute gradient than
-                        // pixels on its left and right sides.
-                        if ((gradientMagnitude[i * width + j] >=
-                                gradientMagnitude[i * width + j + 1]) &&
-                            (gradientMagnitude[i * width + j] >=
-                                gradientMagnitude[i * width + j - 1])) {
-                            edgeMap[i * width + j] = true;
-                        }
-                        break;
-                    case 1:
-                        // If the gradient points right-downwards, the pixel is
-                        // on an edge if it has a larger absolute gradient than
-                        // pixels on its upper left and bottom right sides.
-                        if ((gradientMagnitude[i * width + j] >=
-                                gradientMagnitude[(i + 1) * width + j + 1]) &&
-                            (gradientMagnitude[i * width + j] >=
-                                gradientMagnitude[(i - 1) * width + j - 1])) {
-                            edgeMap[i * width + j] = true;
-                        }
-                        break;
-                    case 2:
-                        // If the gradient points upwards, the pixel is
-                        // on an edge if it has a larger absolute gradient than
-                        // pixels on its up and down sides.
-                        if ((gradientMagnitude[i * width + j] >=
-                                gradientMagnitude[(i + 1) * width + j]) &&
-                            (gradientMagnitude[i * width + j] >=
-                                gradientMagnitude[(i - 1) * width + j])) {
-                            edgeMap[i * width + j] = true;
-                        }
-                        break;
-                    case 3:
-                        // If the gradient points right-upwards, the pixel is
-                        // on an edge if it has a larger absolute gradient than
-                        // pixels on its bottom left and upper right sides.
-                        if ((gradientMagnitude[i * width + j] >=
-                                gradientMagnitude[(i - 1) * width + j + 1]) &&
-                            (gradientMagnitude[i * width + j] >=
-                                gradientMagnitude[(i + 1) * width + j - 1])) {
-                            edgeMap[i * width + j] = true;
-                        }
-                  }
-
-             }
-        }
-
-        houghLineDetection(edgeMap, gradientMagnitude, gradientDirection);
-
-        // Cleans up
-        delete[] gradientMap;
-        delete[] gradientDirectionInt;
-        delete[] gradientMagnitude;
-        delete[] gradientDirection;
-
-    } else {
-        ALOGE("Not a color image!");
-    }
-
-    delete[] edgeMap;
-    delete[] grayImage;
-}
-
-// Runs the Hough voting algorithm to find the grid of the color checker
-// with the edge map, gradient direction and gradient magnitude as inputs.
-void ColorCheckerTest::houghLineDetection(bool* edgeMap,
-                                          float* gradientMagnitude,
-                                          float* gradientDirection) {
-    // Constructs a graph for Hough voting. The vertical axis counts the vote
-    // for a certain angle. The horizontal axis counts the vote for the distance
-    // of a line from the origin of the image.
-    int houghHeight = 180;
-    int houghWidth = 200;
-    int houghCounts[houghHeight][houghWidth];
-    int houghSum[houghHeight][houghWidth];
-
-    int** houghVote;
-    houghVote = new int*[180];
-    for (int i = 0; i < 180; ++i) {
-        houghVote[i] = new int[200];
-    }
-
-    for (int i = 0; i < houghHeight; ++i) {
-        for (int j = 0; j < houghWidth; ++j) {
-            houghCounts[i][j] = 0;
-            houghVote[i][j] = 0;
-            houghSum[i][j] = 0;
-        }
-    }
-
-    // Vectors to record lines in two orthogonal directions.
-    // Each line is represented by its direction and its distance to the origin.
-    std::vector<std::vector<int> > verticalLines;
-    std::vector<std::vector<int> > horizontalLines;
-    float radius;
-    int height = mImage->getHeight();
-    int width = mImage->getWidth();
-
-    // Processes the significant edge pixels and casts a vote for the
-    // corresponding edge passing through each pixel.
-    for (int i = 1; i < height - 1; ++i) {
-        for (int j = 1; j < width - 1; ++j) {
-            // Sets a threshold on the gradient magnitude to discount noise.
-            if (edgeMap[i * width + j] &&
-                (gradientMagnitude[i * width + j] > 15)) {
-                int shiftedAngle;
-
-                // Shifts angles by 45 degrees so the vertical and horizontal
-                // directions are mapped to 45 and 135 degrees to avoid padding.
-                // This relies on the assumption that the color checker is
-                // placed roughly parallel to the image borders, so edges at
-                // an angle of 45 degrees will be rare.
-                shiftedAngle = (static_cast<int>(
-                        -gradientDirection[i * width + j]) + 225 % 180);
-                float shiftedAngleRad = static_cast<float>(shiftedAngle)
-                        * M_PI / 180.0f;
-
-                // Computes the distance of the line from the origin.
-                float a, b;
-                a = static_cast<float>(i - j) / sqrt(2.0f);
-                b = static_cast<float>(i + j) / sqrt(2.0f);
-                radius = a * sin(shiftedAngleRad) - b * cos(shiftedAngleRad);
-
-                // Adds one vote for the line. The line's angle is shifted by
-                // 45 degrees to avoid padding for the vertical lines,
-                // which are more common than diagonal lines. The line's
-                // distance is mapped to [0, 200] from [-200, 200].
-                ++houghCounts[shiftedAngle][static_cast<int>((radius / 2.0f) +
-                                                              100.0f)];
-
-                drawPoint(i, j, Vec3i(255, 255, 255));
-            }
-        }
-    }
-
-    int houghAngleSum[houghHeight];
-    int primaryVerticalAngle, primaryHorizontalAngle;
-    int max1 = 0;
-    int max2 = 0;
-
-    // Looks for the two primary angles of the lines.
-    for (int i = 5; i < houghHeight - 5; ++i) {
-        houghAngleSum[i] = 0;
-        for (int j = 0; j < houghWidth; ++j) {
-            for (int l = -5; l <= 5; ++l) {
-                houghSum[i][j] += houghCounts[i + l][j];
-            }
-            houghAngleSum[i] += houghSum[i][j];
-        }
-
-        if ((i < houghHeight / 2) && (houghAngleSum[i] > max1)) {
-            max1 = houghAngleSum[i];
-            primaryVerticalAngle = i;
-        } else if ((i > houghHeight / 2) && (houghAngleSum[i] > max2)) {
-            max2 = houghAngleSum[i];
-            primaryHorizontalAngle = i;
-        }
-    }
-
-    ALOGV("Primary angles are %d, %d",
-         primaryVerticalAngle, primaryHorizontalAngle);
-
-    int angle;
-
-    // For each primary angle, look for the highest voted lines.
-    for (int k = 0; k < 2; ++k) {
-        if (k == 0) {
-            angle = primaryVerticalAngle;
-        } else {
-            angle = primaryHorizontalAngle;
-        }
-
-        std::vector<int> line(2);
-        for (int j = 2; j < houghWidth - 2; ++j) {
-            houghVote[angle][j] = houghSum[angle][j];
-            houghSum[angle][j] = 0;
-        }
-
-        // For each radius, average the vote with nearby ones.
-        for (int j = 2; j < houghWidth - 2; ++j) {
-            for (int m = -2; m <= 2; ++m) {
-                houghSum[angle][j] += houghVote[angle][j + m];
-            }
-        }
-
-        bool isCandidate[houghWidth];
-
-        // Finds whether a line is a candidate by rejecting the ones that have
-        // a lower vote than others in the neighborhood.
-        for (int j = 2; j < houghWidth - 2; ++j) {
-            isCandidate[j] = true;
-            for (int m = -2; ((isCandidate[j]) && (m <= 2)); ++m) {
-                if ((houghSum[angle][j] < 20) ||
-                    (houghSum[angle][j] < houghSum[angle][j + m])) {
-                    isCandidate[j] = false;
-                }
-            }
-        }
-
-        int iter1 = 0;
-        int iter2 = 0;
-        int count = 0;
-
-        // Finds the lines that are not too close to each other and adds them
-        // to the detected lines.
-        while (iter2 < houghWidth) {
-            while ((!isCandidate[iter2]) && (iter2 < houghWidth)) {
-                ++iter2;
-            }
-            if ((isCandidate[iter2]) && (iter2 - iter1 < 5)) {
-                iter1 = (iter2 + iter1) / 2;
-                ++iter2;
-            } else {
-                line[0] = angle;
-                line[1] = (iter1 - 100) * 2;
-                if (iter1 != 0) {
-                    if (k == 0) {
-                        verticalLines.push_back(line);
-                        Vec3i color(verticalLines.size() * 20, 0, 0);
-                        drawLine(line[0], line[1], color);
-                    } else {
-                        horizontalLines.push_back(line);
-                        Vec3i color(0, horizontalLines.size() * 20, 0);
-                        drawLine(line[0], line[1], color);
-                    }
-                }
-                iter1 = iter2;
-                ++iter2;
-                ALOGV("pushing back line %d %d", line[0], line[1]);
-            }
-        }
-    }
-
-    ALOGV("Numbers of lines in each direction is %d, %d",
-         verticalLines.size(), horizontalLines.size());
-
-    for (int i = 0; i < 180; ++i) {
-        delete[] houghVote[i];
-    }
-    delete[] houghVote;
-
-    findCheckerBoards(verticalLines, horizontalLines);
-}
-
-// Computes the gradient in both the x and y directions of a layer.
-void ColorCheckerTest::computeGradient(unsigned char* layer,
-                                       float* gradientMap) {
-    int width = mImage->getWidth();
-    int height = mImage->getHeight();
-
-    // Computes the gradient in the whole image except the image borders.
-    for (int i = 1; i < height - 1; ++i) {
-        for (int j = 1; j < width - 1; ++j) {
-            gradientMap[(i * width + j) * 2] =
-                    0.5f * (layer[i * width + j + 1] -
-                            layer[i * width + j - 1]);
-            gradientMap[(i * width + j) * 2 + 1] =
-                    0.5f * (layer[(i + 1) * width + j] -
-                           layer[(i - 1) * width + j]);
-        }
-    }
-}
-
-// Tries to find the checker boards with the highest voted lines
-void ColorCheckerTest::findCheckerBoards(
-        std::vector<std::vector<int> > verticalLines,
-        std::vector<std::vector<int> > horizontalLines) {
-    ALOGV("Start looking for Color checker");
-
-    int numHorizontalLines = mCandidateColors.size();
-    int numVerticalLines;
-    if (numHorizontalLines > 0) {
-        numVerticalLines = mCandidateColors[0].size();
-        for (int i = 0; i < numHorizontalLines; ++i) {
-            for (int j = 0; j < numVerticalLines; ++j) {
-                if (mCandidateColors[i][j] != NULL) {
-                    delete mCandidateColors[i][j];
-                }
-                if (mCandidatePositions[i][j] != NULL) {
-                    delete mCandidatePositions[i][j];
-                }
-            }
-            mCandidateColors[i].clear();
-            mCandidatePositions[i].clear();
-        }
-    }
-    mCandidateColors.clear();
-    mCandidatePositions.clear();
-
-    ALOGV("Candidates deleted!");
-
-    numVerticalLines = verticalLines.size();
-    numHorizontalLines = horizontalLines.size();
-    Vec2f pointUpperLeft;
-    Vec2f pointBottomRight;
-
-    mCandidateColors.resize(numHorizontalLines - 1);
-    mCandidatePositions.resize(numHorizontalLines - 1);
-
-    for (int i = numVerticalLines - 1; i >= 1; --i) {
-        for (int j = 0; j < numHorizontalLines - 1; ++j) {
-            // Finds the upper left and bottom right corners of each rectangle
-            // formed by pairs of neighboring highest-voted lines.
-            pointUpperLeft = findCrossing(verticalLines[i], horizontalLines[j]);
-            pointBottomRight = findCrossing(verticalLines[i - 1],
-                                            horizontalLines[j + 1]);
-
-            Vec3i* color = new Vec3i();
-            Vec2f* pointCenter = new Vec2f();
-            // Verifies if they are separated by a reasonable distance.
-            if (verifyPointPair(pointUpperLeft, pointBottomRight,
-                                pointCenter, color)) {
-                mCandidatePositions[j].push_back(pointCenter);
-                mCandidateColors[j].push_back(color);
-                ALOGV("Color at (%d, %d) is (%d, %d, %d)", j, i,color->r(), color->g(), color->b());
-
-            } else {
-                mCandidatePositions[j].push_back(NULL);
-                mCandidateColors[j].push_back(NULL);
-                delete color;
-                delete pointCenter;
-            }
-        }
-    }
-
-    ALOGV("Candidates Number (%d, %d)", mCandidateColors.size(), mCandidateColors[0].size());
-    // Verifies whether the current line candidates form a valid color checker.
-    verifyColorGrid();
-}
-
-// Returns the crossing point of two lines given their parameters.
-Vec2f ColorCheckerTest::findCrossing(std::vector<int> line1,
-                                     std::vector<int> line2) {
-    Vec2f crossingPoint;
-    float r1 = static_cast<float>(line1[1]);
-    float r2 = static_cast<float>(line2[1]);
-    float ang1, ang2;
-    float y1, y2;
-
-    ang1 = static_cast<float>(line1[0]) / 180.0f * M_PI;
-    ang2 = static_cast<float>(line2[0]) / 180.0f * M_PI;
-
-    float x, y;
-    x = (r1 * cos(ang2) - r2 * cos(ang1)) / sin(ang1 - ang2);
-    y = (r1 * sin(ang2) - r2 * sin(ang1)) / sin(ang1 - ang2);
-
-    crossingPoint.set((x + y) / sqrt(2.0), (y - x) / sqrt(2.0));
-
-    //ALOGV("Crosspoint at (%f, %f)", crossingPoint.x(), crossingPoint.y());
-    return crossingPoint;
-}
-
-// Verifies whether two opposite corners on a quadrilateral actually can be
-// the two corners of a color checker.
-bool ColorCheckerTest::verifyPointPair(Vec2f pointUpperLeft,
-                                       Vec2f pointBottomRight,
-                                       Vec2f* pointCenter,
-                                       Vec3i* color) {
-    bool success = true;
-
-    /** 5 and 30 are the thresholds tuned for a 640*480 resolution. */
-    if ((pointUpperLeft.x() < 0) ||
-        (pointUpperLeft.x() >= mImage->getHeight()) ||
-        (pointUpperLeft.y() < 0) ||
-        (pointUpperLeft.y() >= mImage->getWidth()) ||
-        (pointBottomRight.x() < 0) ||
-        (pointBottomRight.x() >= mImage->getHeight()) ||
-        (pointBottomRight.y() < 0) ||
-        (pointBottomRight.y() >= mImage->getWidth()) ||
-        (std::abs(pointUpperLeft.x() - pointBottomRight.x()) <= 5) ||
-        (std::abs(pointUpperLeft.y() - pointBottomRight.y()) <= 5) ||
-        (std::abs(pointUpperLeft.x() - pointBottomRight.x()) >= 30) ||
-        (std::abs(pointUpperLeft.y() - pointBottomRight.y()) >= 30)) {
-
-        // If any of the quadrilateral corners are out of the image or if
-        // the distance between them is too large or too small, the
-        // quadrilateral cannot be one of the checkers.
-        success = false;
-    } else {
-        // Finds the checker center if the corners of the rectangle meet the criteria.
-        pointCenter->set((pointUpperLeft.x() + pointBottomRight.x()) / 2.0f,
-                       (pointUpperLeft.y() + pointBottomRight.y()) / 2.0f);
-        color->set(mImage->getPixelValue(*pointCenter).r(),
-                   mImage->getPixelValue(*pointCenter).g(),
-                   mImage->getPixelValue(*pointCenter).b());
-        ALOGV("Color at (%f, %f) is (%d, %d, %d)", pointCenter->x(), pointCenter->y(),color->r(), color->g(), color->b());
-    }
-    return success;
-}
-
-// Verifies the color checker centers and finds the match between the detected
-// color checker and the reference MacBeth color checker
-void ColorCheckerTest::verifyColorGrid() {
-    ALOGV("Start looking for Color Grid");
-    int numHorizontalLines = mCandidateColors.size();
-    int numVerticalLines = mCandidateColors[0].size();
-    bool success = false;
-
-    // Computes the standard deviation of one row/column of the proposed color
-    // checker. Discards the row/column if the std is below a threshold.
-    for (int i = 0; i < numHorizontalLines; ++i) {
-        Vec3f meanColor(0.f, 0.f, 0.f);
-        int numNonZero = 0;
-
-        for (int j = 0; j < numVerticalLines; ++j) {
-            if (mCandidateColors[i][j] != NULL) {
-                ALOGV("candidate color (%d, %d) is (%d, %d, %d)", i, j, mCandidateColors[i][j]->r(), mCandidateColors[i][j]->g(), mCandidateColors[i][j]->b());
-
-                meanColor = meanColor + (*mCandidateColors[i][j]);
-                ++numNonZero;
-            }
-        }
-        if (numNonZero > 0) {
-            meanColor = meanColor / numNonZero;
-        }
-        ALOGV("Mean color for vertical direction computed!");
-
-        float std = 0;
-        for (int j = 0; j < numVerticalLines; ++j) {
-            if (mCandidateColors[i][j] != NULL) {
-                std += mCandidateColors[i][j]->squareDistance<float>(meanColor);
-            }
-        }
-        if (numNonZero > 0) {
-            std = sqrt(std / (3 * numNonZero));
-        }
-        ALOGV("st. deviation for the %d dir1 is %d", i, static_cast<int>(std));
-
-        if ((std <= 30) && (numNonZero > 1)) {
-            for (int j = 0; j < numVerticalLines; ++j) {
-                if (mCandidateColors[i][j] != NULL) {
-                    delete mCandidateColors[i][j];
-                    mCandidateColors[i][j] = NULL;
-                }
-            }
-        }
-    }
-
-    // Discards the column/row of the color checker if the std is below a
-    // threshold.
-    for (int j = 0; j < numVerticalLines; ++j) {
-        Vec3f meanColor(0.f, 0.f, 0.f);
-        int numNonZero = 0;
-
-        for (int i = 0; i < numHorizontalLines; ++i) {
-            if (mCandidateColors[i][j] != NULL) {
-                meanColor = meanColor + (*mCandidateColors[i][j]);
-                ++numNonZero;
-            }
-        }
-        if (numNonZero > 0) {
-            meanColor = meanColor / numNonZero;
-        }
-
-        float std = 0;
-        for (int i = 0; i < numHorizontalLines; ++i) {
-            if (mCandidateColors[i][j] != NULL) {
-                std += mCandidateColors[i][j]->squareDistance<float>(meanColor);
-            }
-        }
-        if (numNonZero > 0) {
-            std = sqrt(std / (3 * numNonZero));
-        }
-
-        ALOGV("st. deviation for the %d dir2 is %d", j, static_cast<int>(std));
-
-        if ((std <= 30) && (numNonZero > 1)) {
-            for (int i = 0; i < numHorizontalLines; ++i) {
-                if (mCandidateColors[i][j] != NULL) {
-                    delete mCandidateColors[i][j];
-                    mCandidateColors[i][j] = NULL;
-                }
-            }
-        }
-    }
-
-    for (int i = 0; i < numHorizontalLines; ++i) {
-        for (int j = 0; j < numVerticalLines; ++j) {
-            if (mCandidateColors[i][j] != NULL) {
-                ALOGV("position (%d, %d) is at (%f, %f) with color (%d, %d, %d)",
-                     i, j,
-                     mCandidatePositions[i][j]->x(),
-                     mCandidatePositions[i][j]->y(),
-                     mCandidateColors[i][j]->r(),
-                     mCandidateColors[i][j]->g(),
-                     mCandidateColors[i][j]->b());
-            } else {
-                ALOGV("position (%d, %d) is 0", i, j);
-            }
-        }
-    }
-
-    // Finds the match between the detected color checker and the reference
-    // MacBeth color checker.
-    int rowStart = 0;
-    int rowEnd = 0;
-
-    // Loops until all detected color checkers have been processed.
-    while (!success) {
-        int columnStart = 0;
-        int columnEnd = 0;
-        bool isRowStart = false;
-        bool isRowEnd = true;
-
-        // Finds the row start of the next block of detected color checkers.
-        while ((!isRowStart) && (rowStart <  numHorizontalLines)) {
-            for (int j = 0; j < numVerticalLines; ++j) {
-                if (mCandidateColors[rowStart][j] != NULL) {
-                    isRowStart = true;
-                }
-            }
-            ++rowStart;
-        }
-        rowStart--;
-        rowEnd = rowStart;
-        ALOGV("rowStart is %d", rowStart);
-
-        // Finds the row end of the next block of detected color checkers.
-        while ((isRowEnd) && (rowEnd < numHorizontalLines)) {
-            isRowEnd = false;
-            for (int j = 0; j < numVerticalLines; ++j) {
-                if (mCandidateColors[rowEnd][j] != NULL) {
-                    isRowEnd= true;
-                }
-            }
-            if (isRowEnd) {
-                ++rowEnd;
-            }
-        }
-        if ((!isRowEnd) && isRowStart) {
-            rowEnd--;
-        }
-        if ((isRowEnd) && (rowEnd == numHorizontalLines)) {
-            rowEnd--;
-            isRowEnd = false;
-        }
-        ALOGV("rowEnd is %d", rowEnd);
-
-        // Matches color checkers between the start row and the end row.
-        bool successVertical = false;
-
-        while (!successVertical) {
-            bool isColumnEnd = true;
-            bool isColumnStart = false;
-
-            // Finds the start column of the next block of color checkers.
-            while ((!isColumnStart) && (columnStart < numVerticalLines)) {
-                if (mCandidateColors[rowStart][columnStart] != NULL) {
-                    isColumnStart = true;
-                }
-                ++columnStart;
-            }
-            columnStart--;
-            columnEnd = columnStart;
-
-            // Finds the end column of the next block of color checkers.
-            while ((isColumnEnd) && (columnEnd < numVerticalLines)) {
-                isColumnEnd = false;
-                if (mCandidateColors[rowStart][columnEnd] != NULL)
-                    isColumnEnd = true;
-                if (isColumnEnd) {
-                    ++columnEnd;
-                }
-            }
-
-            if ((!isColumnEnd) && isColumnStart) {
-                columnEnd--;
-            }
-            if ((isColumnEnd) && (columnEnd == numVerticalLines)) {
-                columnEnd--;
-                isColumnEnd = false;
-            }
-
-            // Finds the best match on the MacBeth reference color checker for
-            // the continuous block of detected color checkers.
-            if (isRowStart && (!isRowEnd) &&
-                isColumnStart && (!isColumnEnd)) {
-                findBestMatch(rowStart, rowEnd, columnStart, columnEnd);
-            }
-            ALOGV("FindBestMatch for %d, %d, %d, %d", rowStart,
-                 rowEnd, columnStart, columnEnd);
-
-            // If the column search finishes, go out of the loop
-            if (columnEnd >= numVerticalLines - 1) {
-                successVertical = true;
-            } else {
-                columnStart = columnEnd + 1;
-            }
-        }
-        ALOGV("Continuing to search for direction 1");
-
-        // If the row search finishes, go out of the loop
-        if (rowEnd >= numHorizontalLines - 1) {
-            success = true;
-        } else {
-            rowStart = rowEnd + 1;
-        }
-    }
-
-    for (int i = 0; i < 4; ++i) {
-        for (int j = 0; j < 6; ++j) {
-            if (mMatchPositions[i][j] != NULL) {
-                ALOGV("Reference Match position for (%d, %d) is (%f, %f)", i, j,
-                     mMatchPositions[i][j]->x(), mMatchPositions[i][j]->y());
-            }
-        }
-    }
-
-    fillRefColorGrid();
-}
-
-// Finds the best match on the MacBeth color checker for the continuous block of
-// detected color checkers bounded by rows i1, i2 and columns j1, j2.
-// Assumes that the subsampled grid is at most 4*6.
-void ColorCheckerTest::findBestMatch(int i1, int i2, int j1, int j2) {
-    int numHorizontalGrid = i2 - i1 + 1;
-    int numVerticalGrid = j2 - j1 + 1;
-
-    if (((numHorizontalGrid > 1) || (numVerticalGrid > 1)) &&
-        (numHorizontalGrid <= 4) && (numVerticalGrid <= 6)) {
-        ALOGV("i1, j2, j1, j2 is %d, %d, %d, %d", i1, i2, j1, j2);
-        float minError;
-        float error = 0.f;
-        int horizontalMatch, verticalMatch;
-
-        // Finds the match start point where the error is minimized.
-        for (int i = 0; i < numHorizontalGrid; ++i) {
-            for (int j = 0; j < numVerticalGrid; ++j) {
-                if (mCandidateColors[i1 + i][j1 + j] != NULL) {
-                    error += mCandidateColors[i1 + i][j1 + j]->squareDistance<int>(
-                            *mReferenceColors[i][j]);
-                }
-            }
-        }
-        ALOGV("Error is %f", error);
-        minError = error;
-        horizontalMatch = 0;
-        verticalMatch = 0;
-
-        for (int i = 0; i <= 4 - numHorizontalGrid; ++i) {
-            for (int j = 0; j <= 6 - numVerticalGrid; ++j) {
-                error = 0.f;
-
-                for (int id = 0; id < numHorizontalGrid; ++id) {
-                    for (int jd = 0; jd < numVerticalGrid; ++jd) {
-                        if (mCandidateColors[i1 + id][j1 + jd] != NULL) {
-                            error += mCandidateColors[i1 + id][j1 + jd]->
-                                    squareDistance<int>(
-                                            *mReferenceColors[i + id][j + jd]);
-                        }
-                    }
-                }
-
-                if (error < minError) {
-                    minError = error;
-                    horizontalMatch = i;
-                    verticalMatch = j;
-                }
-                ALOGV("Processed %d, %d and error is %f", i, j, error );
-            }
-        }
-
-        for (int id = 0; id < numHorizontalGrid; ++id) {
-            for (int jd = 0; jd < numVerticalGrid; ++jd) {
-                if (mCandidatePositions[i1 + id][j1 + jd] != NULL) {
-                    mMatchPositions[horizontalMatch + id][verticalMatch + jd] =
-                            new Vec2f(mCandidatePositions[i1 + id][j1 + jd]->x(),
-                                      mCandidatePositions[i1 + id][j1 + jd]->y());
-                }
-            }
-        }
-        ALOGV("Grid match starts at %d, %d", horizontalMatch, verticalMatch);
-    }
-}
-
-// Finds the boundary of a color checker by its color similarity to the center.
-// Also predicts the location of unmatched checkers.
-void ColorCheckerTest::fillRefColorGrid() {
-    int rowStart = 0;
-    int columnStart = 0;
-    bool foundStart = true;
-
-    for (int i = 0; (i < 4) && foundStart; ++i) {
-        for (int j = 0; (j < 6) && foundStart; ++j) {
-            if (mMatchPositions[i][j] != NULL) {
-                rowStart = i;
-                columnStart = j;
-                foundStart = false;
-            }
-        }
-    }
-    ALOGV("First match found at (%d, %d)", rowStart, columnStart);
-
-    float rowDistance, columnDistance;
-    rowDistance = 0;
-    columnDistance = 0;
-    int numRowGrids = 0;
-    int numColumnGrids = 0;
-
-    for (int i = rowStart; i < 4; ++i) {
-        for (int j = columnStart; j < 6; ++j) {
-            if (mMatchPositions[i][j] != NULL) {
-                if (i > rowStart) {
-                    ++numRowGrids;
-                    rowDistance += (mMatchPositions[i][j]->x() -
-                                mMatchPositions[rowStart][columnStart]->x()) /
-                                static_cast<float>(i - rowStart);
-                }
-                if (j > columnStart) {
-                    ++numColumnGrids;
-                    columnDistance += (mMatchPositions[i][j]->y() -
-                                mMatchPositions[rowStart][columnStart]->y()) /
-                                static_cast<float>(j - columnStart);
-                }
-            }
-        }
-    }
-
-    if ((numRowGrids > 0) && (numColumnGrids > 0)) {
-        rowDistance = rowDistance / numRowGrids;
-        columnDistance = columnDistance / numColumnGrids;
-        ALOGV("delta is %f, %f", rowDistance, columnDistance);
-
-        for (int i = 0; i < 4; ++i) {
-            for (int j = 0 ; j < 6; ++j) {
-                if (mMatchPositions[i][j] == NULL) {
-                    mMatchPositions[i][j] = new Vec2f(
-                            mMatchPositions[rowStart][columnStart]->x() +
-                                    (i - rowStart) * rowDistance,
-                            mMatchPositions[rowStart][columnStart]->y() +
-                                    (j - columnStart) * columnDistance);
-                }
-            }
-        }
-        for (int i = 0; i < 4; ++i) {
-            for (int j = 0; j < 6; ++j) {
-                float radius = 0;
-                Vec3i color = mImage->getPixelValue(*mMatchPositions[i][j]);
-                Vec3f meanColor(0.f , 0.f, 0.f);
-
-                int numPixels = 0;
-                for (int ii  = static_cast<int>(mMatchPositions[i][j]->x() -
-                                                rowDistance/2);
-                     ii <= static_cast<int>(mMatchPositions[i][j]->x() +
-                                            rowDistance/2);
-                     ++ii) {
-                    for (int jj = static_cast<int>(mMatchPositions[i][j]->y() -
-                                                   columnDistance/2);
-                         jj <= static_cast<int>(mMatchPositions[i][j]->y() +
-                                                columnDistance/2);
-                         ++jj) {
-                        if ((ii >= 0) && (ii < mImage->getHeight()) &&
-                            (jj >= 0) && (jj < mImage->getWidth())) {
-                            Vec3i pixelColor = mImage->getPixelValue(ii,jj);
-                            float error = color.squareDistance<int>(pixelColor);
-
-                            if (error < COLOR_ERROR_THRESHOLD) {
-                                drawPoint(ii, jj, *mReferenceColors[i][j]);
-                                meanColor = meanColor + pixelColor;
-                                numPixels++;
-                                Vec2i pixelPosition(ii, jj);
-
-                                if (pixelPosition.squareDistance<float>(
-                                        *mMatchPositions[i][j]) > radius) {
-                                    radius = pixelPosition.squareDistance<float>(
-                                            *mMatchPositions[i][j]);
-                                }
-                            }
-                        }
-                    }
-                }
-
-                /** Computes the radius of the checker.
-                 * The radius computed above is the squared distance to the
-                 * furthest point with a matching color. To be conservative, we
-                 * only consider an area with half that radius. Since the value
-                 * kept here is a squared distance, the recorded value is 1/4
-                 * of the one computed above.
-                 */
-                mMatchRadius[i][j] = radius / 4.f;
-                mMatchColors[i][j] = new Vec3f(meanColor / numPixels);
-
-                ALOGV("Reference color at (%d, %d) is (%d, %d, %d)", i, j,
-                     mReferenceColors[i][j]->r(),
-                     mReferenceColors[i][j]->g(),
-                     mReferenceColors[i][j]->b());
-                ALOGV("Average color at (%d, %d) is (%f, %f, %f)", i, j,
-                     mMatchColors[i][j]->r(),
-                     mMatchColors[i][j]->g(),
-                     mMatchColors[i][j]->b());
-                ALOGV("Radius is %f", mMatchRadius[i][j]);
-            }
-        }
-
-        mSuccess = true;
-    }
-}
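
The grid detection above is built on Hough voting: every strong edge pixel casts a vote in an (angle, distance) accumulator, and the dominant angles and radii define the candidate grid lines. The removed code uses a 45-degree-shifted, rotated-coordinate variant of this; the sketch below shows only the standard accumulator step, with illustrative sizes, to make the voting idea concrete.

#include <cmath>
#include <vector>

// edge[y * width + x] marks edge pixels. Returns a 180 x (2 * rMax + 1) vote
// grid; peaks correspond to dominant lines in (angle, distance) form.
std::vector<std::vector<int> > houghVotes(const std::vector<bool>& edge,
                                          int width, int height) {
    const int rMax = static_cast<int>(std::sqrt(
            static_cast<double>(width) * width +
            static_cast<double>(height) * height));
    std::vector<std::vector<int> > votes(180,
            std::vector<int>(2 * rMax + 1, 0));

    for (int y = 0; y < height; ++y) {
        for (int x = 0; x < width; ++x) {
            if (!edge[y * width + x]) continue;
            for (int angle = 0; angle < 180; ++angle) {
                const double theta = angle * M_PI / 180.0;
                // Signed distance of the line through (x, y) whose normal
                // makes angle theta with the x axis.
                const int r = static_cast<int>(std::lround(
                        x * std::cos(theta) + y * std::sin(theta)));
                ++votes[angle][r + rMax];  // shift so the index is non-negative
            }
        }
    }
    return votes;
}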
diff --git a/apps/CtsVerifier/lib/colorchecker/exposurecompensationtest.cpp b/apps/CtsVerifier/lib/colorchecker/exposurecompensationtest.cpp
deleted file mode 100644
index da9fc40..0000000
--- a/apps/CtsVerifier/lib/colorchecker/exposurecompensationtest.cpp
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "ExposureCompensationTest"
-#include <utils/Log.h>
-#include <utils/Timers.h>
-#include <cmath>
-#include <string>
-#include <stdio.h>
-
-#include "vec2.h"
-#include "vec3.h"
-#include "exposurecompensationtest.h"
-
-const float GAMMA_CORRECTION = 2.2f;
-void ExposureCompensationTest::processData() {
-    ALOGV("Start Processing Exposure Compensation Test Data!");
-    clearDebugImage();
-
-    if (mDebugText != NULL) {
-        delete mDebugText;
-        mDebugText = NULL;
-    }
-
-    int numTests = mExposureValues.size();
-    int numPatches = mCheckerColors[0].size();
-    ALOGV("Processing %d tests with %d patches", numTests, numPatches);
-
-    mDebugText = new char[320 * numTests];
-    mDebugText[0] = 0;
-    char* debugText = new char[50];
-
-    Vec3i red(255, 0, 0);
-    Vec3i green(0, 255, 0);
-    Vec3i blue(0, 0, 255);
-
-    float minExposure = -3.0f;
-    float scale = 9.0f;
-    for (int i = 0; i < numTests; ++i) {
-        snprintf(debugText, 50, "Exposure is %f \n", mExposureValues[i]);
-        strcat(mDebugText, debugText);
-        for (int j = 0; j < numPatches; ++j) {
-            int exposureRed = static_cast<int>((
-                log(static_cast<float>(mReferenceColors[j].r()))
-                / log(2.0f) * GAMMA_CORRECTION +
-                mExposureValues[i] - minExposure) * scale);
-            int exposureGreen = static_cast<int>((
-                log(static_cast<float>(mReferenceColors[j].g()))
-                / log(2.0f) * GAMMA_CORRECTION +
-                mExposureValues[i] - minExposure) * scale);
-            int exposureBlue = static_cast<int>((
-                log(static_cast<float>(mReferenceColors[j].b()))
-                / log(2.0f) * GAMMA_CORRECTION +
-                mExposureValues[i] - minExposure) * scale);
-
-            snprintf(debugText, 50, "%d %f %d %f %d %f \n",
-                    exposureRed, mCheckerColors[i][j].r(),
-                    exposureGreen, mCheckerColors[i][j].g(),
-                    exposureBlue, mCheckerColors[i][j].b());
-
-            ALOGV("%s", debugText);
-            strcat(mDebugText, debugText);
-
-            drawPoint(200 - exposureRed, mCheckerColors[i][j].r(), red);
-            drawPoint(200 - exposureGreen, mCheckerColors[i][j].g(), green);
-            drawPoint(200 - exposureBlue, mCheckerColors[i][j].b(), blue);
-        }
-    }
-    mExposureValues.clear();
-    mCheckerColors.clear();
-}
-
-void ExposureCompensationTest::initializeReferenceColors() {
-    mReferenceColors.resize(6);
-
-    mReferenceColors[0].set(243, 243, 242);
-    mReferenceColors[1].set(200, 200, 200);
-    mReferenceColors[2].set(160, 160, 160);
-    mReferenceColors[3].set(122, 122, 121);
-    mReferenceColors[4].set(85, 85, 85);
-    mReferenceColors[5].set(52, 52, 52);
-}
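
The debug plot above places each patch at an exposure index computed from log2 of the reference channel value, scaled by the gamma constant and offset by the EV compensation. Written out as a small helper with the same constants, the mapping looks roughly like this (the function name is an assumption):

#include <cmath>

// referenceValue: reference patch channel value (e.g. 243 for the white patch)
// exposureValue:  EV compensation used for the shot, e.g. in [-3.0, 3.0]
int exposureIndex(int referenceValue, float exposureValue) {
    const float kGammaCorrection = 2.2f;
    const float kMinExposure = -3.0f;  // lowest EV in the sweep
    const float kScale = 9.0f;         // spreads the index across the plot
    const float log2Value =
            std::log(static_cast<float>(referenceValue)) / std::log(2.0f);
    return static_cast<int>(
            (log2Value * kGammaCorrection + exposureValue - kMinExposure) *
            kScale);
}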
diff --git a/apps/CtsVerifier/lib/colorchecker/imagetesthandler.cpp b/apps/CtsVerifier/lib/colorchecker/imagetesthandler.cpp
deleted file mode 100644
index cc3bca9..0000000
--- a/apps/CtsVerifier/lib/colorchecker/imagetesthandler.cpp
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "ImageTestHandler"
-#include <utils/Log.h>
-#include <utils/Timers.h>
-#include <cmath>
-#include <cstring>
-
-#include "vec2.h"
-#include "vec3.h"
-#include "imagetesthandler.h"
-
-void ImageTestHandler::initDebugImage() {
-    mDebugOutput = NULL;
-}
-
-// Initializes the debug image with a given height and width.
-void ImageTestHandler::initDebugImage(int debugHeight,
-                                      int debugWidth) {
-    mDebugOutput = NULL;
-    mDebugOutput = new unsigned char[debugHeight * debugWidth * 4];
-    memset(mDebugOutput, 0, debugHeight * debugWidth * 4);
-
-    mDebugHeight = debugHeight;
-    mDebugWidth = debugWidth;
-}
-
-// Copies an existing image to the debug image.
-void ImageTestHandler::copyDebugImage(int inputHeight, int inputWidth,
-                                      const unsigned char* inputImage) {
-    if ((inputHeight == mDebugHeight) && (inputWidth == mDebugWidth)) {
-        ALOGV("Copying debug images");
-        memcpy(mDebugOutput, inputImage, mDebugHeight * mDebugWidth * 4);
-    }
-}
-
-void ImageTestHandler::clearDebugImage() {
-    if (mDebugOutput != NULL) {
-        delete[] mDebugOutput;
-        mDebugOutput = new unsigned char[mDebugHeight * mDebugWidth * 4];
-        memset(mDebugOutput, 0, mDebugHeight * mDebugWidth * 4);
-    }
-}
-
-
-// Draws a point of a given color.
-void ImageTestHandler::drawPoint(int row, int column, const Vec3i &color) {
-    if ((row >= 0) && (column >= 0) &&
-        (column < mDebugWidth) && (row < mDebugHeight)) {
-        mDebugOutput[(row*mDebugWidth + column) * 4] = color.r();
-        mDebugOutput[(row*mDebugWidth + column) * 4+1] = color.g();
-        mDebugOutput[(row*mDebugWidth + column) * 4+2] = color.b();
-        mDebugOutput[(row*mDebugWidth + column) * 4+3] = 255;
-    }
-}
-
-// Draws a point in Vec2 format of a given color.
-void ImageTestHandler::drawPoint(const Vec2i &point, const Vec3i &color) {
-    drawPoint((int) point.y(), (int) point.x(), color);
-}
-
-// Draws a line of a given color.
-void ImageTestHandler::drawLine(int angle, int radius, const Vec3i &color) {
-    const int r = color.r();
-    const int g = color.g();
-    const int b = color.b();
-    const int a = 255;
-
-    int shiftedMin = -113;
-    int shiftedMax = 83;
-
-    float radiusDouble = static_cast<float>(radius);
-
-    float angleRad = static_cast<float>(angle) * M_PI / 180.0;
-
-    //ALOGV("draw line for (%d, %d)", angle, radius);
-    for (int i = shiftedMin; i <= shiftedMax; ++i) {
-        float j;
-
-        assert(angle != 0);
-        j = (i - radiusDouble / sin(angleRad)) * tan(angleRad);
-        float x = (static_cast<float>(i) + j) / sqrt(2.0);
-        float y = (j - static_cast<float>(i)) / sqrt(2.0);
-
-        drawPoint(x, y, color);
-    }
-}
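
The drawing helpers above treat the debug output as a packed RGBA8888 buffer, so the pixel at (row, column) occupies the four bytes starting at (row * width + column) * 4. The same addressing in isolation, with illustrative names:

#include <cstdint>

// Writes one RGBA8888 pixel, silently ignoring out-of-bounds coordinates,
// mirroring the bounds check in drawPoint above.
void setPixelRgba(uint8_t* buffer, int width, int height, int row, int column,
                  uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
    if (row < 0 || column < 0 || row >= height || column >= width) {
        return;
    }
    uint8_t* p = buffer + (row * width + column) * 4;
    p[0] = r;
    p[1] = g;
    p[2] = b;
    p[3] = a;
}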
diff --git a/apps/CtsVerifier/lib/colorchecker/meteringtest.cpp b/apps/CtsVerifier/lib/colorchecker/meteringtest.cpp
deleted file mode 100644
index 47de5d8..0000000
--- a/apps/CtsVerifier/lib/colorchecker/meteringtest.cpp
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "MeteringTest"
-#include <utils/Log.h>
-#include <utils/Timers.h>
-#include <cmath>
-#include <string>
-
-#include "vec2.h"
-#include "vec3.h"
-#include "meteringtest.h"
-
-const float kOverExposure = 230.f;
-const float kEqThreshold = 0.05f;
-// Processes the stored checker colors by comparing the pixel values from the
-// two scenarios in a test.
-void MeteringTest::processData() {
-    ALOGV("Start Processing Metering Test Data!");
-
-    int numTests = mCheckerColors.size() / 2;
-    mNumPatches = 0;
-
-    if (numTests > 0) {
-        mNumPatches = mCheckerColors[0].size();
-    }
-
-    for (int i = 0; i < numTests; ++i) {
-        mComparisonResults.push_back(
-                isEquivalentTo((&mCheckerColors[i * 2]),
-                               (&mCheckerColors[i * 2 + 1])));
-        mComparisonResults.push_back(
-                isDarkerThan((&mCheckerColors[i * 2]),
-                             (&mCheckerColors[i * 2 + 1])));
-    }
-}
-
-void MeteringTest::clearData() {
-    mComparisonResults.clear();
-    mCheckerColors.clear();
-}
-
-// Compares two given arrays of pixel values and decides whether the first one
-// is significantly darker than the second one.
-bool MeteringTest::isDarkerThan(
-        const std::vector<Vec3f>* checkerColors1,
-        const std::vector<Vec3f>* checkerColors2) const {
-    float meanRatio = 0.f;
-    int meanNumCount = 0;
-
-    for (int i = 0; i < mNumPatches; ++i) {
-        float luminance1 = (*checkerColors1)[i].convertToLuminance();
-        float luminance2 = (*checkerColors2)[i].convertToLuminance();
-
-        // Outside of the saturation range, define 5% as the margin for being
-        // significantly brighter.
-        if ((luminance2 < kOverExposure) && (luminance1 != 0.f)) {
-            meanRatio += luminance2 / luminance1;
-            ++meanNumCount;
-        }
-    }
-    meanRatio = meanRatio / meanNumCount;
-
-    return (meanRatio > 1 + kEqThreshold);
-}
-
-// Compares the two given arrays of pixel values and decides whether they are
-// equivalent within an acceptable range.
-bool MeteringTest::isEquivalentTo(
-        const std::vector<Vec3f>* checkerColors1,
-        const std::vector<Vec3f>* checkerColors2) const {
-    float meanRatio = 0.f;
-    int meanNumCount = 0;
-
-    for (int i = 0; i < mNumPatches; ++i) {
-        float luminance1 = (*checkerColors1)[i].convertToLuminance();
-        float luminance2 = (*checkerColors2)[i].convertToLuminance();
-        ALOGV("Luma_1 and Luma_2 is %f, %f", luminance1, luminance2);
-
-        if ((luminance1 < kOverExposure) && (luminance2 < kOverExposure)) {
-              meanRatio += luminance2 / luminance1;
-              ++meanNumCount;
-        }
-    }
-    meanRatio = meanRatio / meanNumCount;
-
-    return ((meanRatio >= 1 - kEqThreshold) && (meanRatio <= 1 + kEqThreshold));
-}
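
MeteringTest records two booleans per capture pair (equivalent, darker) from the same mean luminance ratio used throughout this library. The same decision can be read as a single three-way classification of the ratio against the 5% band; the enum and helper below are illustrative, not part of the removed code.

// meanRatio is the per-patch average of luminance2 / luminance1 for one pair.
enum class MeteringResult { kDarker, kEquivalent, kBrighter };

MeteringResult classifyPair(float meanRatio) {
    const float kEqThreshold = 0.05f;
    if (meanRatio > 1.f + kEqThreshold) {
        return MeteringResult::kBrighter;  // second capture notably brighter
    }
    if (meanRatio < 1.f - kEqThreshold) {
        return MeteringResult::kDarker;    // second capture notably darker
    }
    return MeteringResult::kEquivalent;    // within the 5% equivalence band
}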
diff --git a/apps/CtsVerifier/lib/colorchecker/testingimage.cpp b/apps/CtsVerifier/lib/colorchecker/testingimage.cpp
deleted file mode 100644
index 28f025f..0000000
--- a/apps/CtsVerifier/lib/colorchecker/testingimage.cpp
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "TestingImage"
-#include <utils/Log.h>
-#include <utils/Timers.h>
-#include <string.h>
-#include <cmath>
-#include <vector>
-#include <assert.h>
-#include "vec3.h"
-
-#include "testingimage.h"
-
-const float GAMMA_CORRECTION = 2.2f;
-
-// Constructs an instance with the given image byte array.
-TestingImage::TestingImage(const unsigned char* inputImage,
-                           int inputHeight, int inputWidth,
-                           int inputChannel, int inputRowSpan) {
-    mImage = new unsigned char[inputRowSpan * inputHeight];
-
-    ALOGV("mImage format created! with size as %d, %d, %d",
-         inputRowSpan, inputHeight, inputChannel);
-    mWidth = inputWidth;
-    mHeight = inputHeight;
-    mChannels = inputChannel;
-    mRowSpan = mWidth * mChannels;
-
-    for (int i = 0; i < mHeight; ++i) {
-        for (int j = 0; j < mWidth; ++j) {
-            for (int k = 0; k < mChannels; ++k) {
-                mImage[i * mRowSpan + j* mChannels + k] =
-                        inputImage[i * inputRowSpan + j * inputChannel + k];
-            }
-        }
-    }
-    ALOGV("mImage converted!");
-}
-
-// Constructs an instance with the given image and resizes it to a new size.
-TestingImage::TestingImage(const unsigned char* inputImage,
-                           int inputHeight, int inputWidth,
-                           int inputChannel, int inputRowSpan,
-                           int newHeight, int newWidth) {
-    mImage = new unsigned char[newHeight * newWidth * inputChannel];
-
-    ALOGV("mImage format created! with size as %d, %d, %d",
-         newHeight, newWidth, inputChannel);
-    mHeight = newHeight;
-    mWidth = newWidth;
-    mChannels = inputChannel;
-    mRowSpan = mWidth * mChannels;
-
-    // Computes how many pixels in the original image correspond to one pixel
-    // in the new image.
-    int heightScale = inputHeight / newHeight;
-    int widthScale = inputWidth / newWidth;
-
-    // Average the corresponding pixels in the original image to compute the
-    // pixel value of the new image.
-    for (int i = 0; i < mHeight; ++i) {
-        for (int j = 0; j < mWidth; ++j) {
-            for (int k = 0; k < mChannels; ++k) {
-                int pixelValue = 0;
-
-                for (int l = 0; l < heightScale; ++l) {
-                    for (int m = 0; m < widthScale; ++m) {
-                        pixelValue += inputImage[
-                                (i * heightScale + l) * inputRowSpan
-                                + (j * widthScale + m) * inputChannel + k];
-                    }
-                }
-                pixelValue = pixelValue / (heightScale * widthScale);
-                mImage[i * mRowSpan + j * mChannels + k] =
-                        (unsigned char) pixelValue;
-            }
-        }
-    }
-}
-
-TestingImage::~TestingImage() {
-    if (mImage!=NULL) {
-        delete[] mImage;
-    }
-}
-
-int TestingImage::getPixelValue(int row, int column, int channel) const {
-    assert ((row >= 0) && (row < mHeight));
-    assert ((column >= 0) && (column < mWidth));
-    assert ((channel >= 0) && (channel < mChannels));
-    return (int)mImage[row * mRowSpan + column * mChannels + channel];
-}
-
-Vec3i TestingImage::getPixelValue(int row, int column) const {
-    Vec3i current_color(getPixelValue(row, column, 0),
-                        getPixelValue(row, column, 1),
-                        getPixelValue(row, column, 2));
-    return current_color;
-}
-
-Vec3i TestingImage::getPixelValue(const Vec2i &pixelPosition) const {
-    return getPixelValue(pixelPosition.x(), pixelPosition.y());
-}
-
-Vec3i TestingImage::getPixelValue(const Vec2f &pixelPosition) const {
-    return getPixelValue(static_cast<int>(pixelPosition.x()),
-                         static_cast<int>(pixelPosition.y()));
-}
-
-// Returns a vector of the colors in the requested block of color checkers.
-// The vector is formatted by going through the block from left to right and
-// from top to bottom.
-const std::vector<Vec3f>* TestingImage::getColorChecker(
-      int rowStart, int rowEnd, int columnStart, int columnEnd,
-      const std::vector<std::vector< Vec2f > >* centerAddress,
-      const std::vector<std::vector< float > >* radiusAddress) const {
-    std::vector<Vec3f>* checkerColors = new std::vector<Vec3f>;
-
-    // Average the pixel values of all pixels within the given radius of the
-    // given center position.
-    for (int i = rowStart; i < rowEnd; ++i) {
-        for (int j = columnStart; j < columnEnd; ++j) {
-            float radius = sqrt((*radiusAddress)[i][j]);
-            Vec2f center((*centerAddress)[i][j].x(),
-                               (*centerAddress)[i][j].y());
-            Vec3f meanColor(0.f, 0.f, 0.f);
-            int numPixels = 0;
-
-            for (int ii = static_cast<int>(center.x() - radius);
-                 ii < static_cast<int>(center.x() + radius); ++ii) {
-                for (int jj = static_cast<int>(center.y() - radius);
-                     jj < static_cast<int>(center.y() + radius); ++jj) {
-
-                    Vec2i pixelPosition(ii,jj);
-                    if (pixelPosition.squareDistance<float>(center) <
-                        (*radiusAddress)[i][j]) {
-                        meanColor = meanColor + getPixelValue(pixelPosition);
-                        ++numPixels;
-                    }
-                }
-            }
-            meanColor = meanColor / numPixels;
-            checkerColors->push_back(meanColor);
-        }
-    }
-
-    return checkerColors;
-}
-
-bool TestingImage::rgbToGrayScale(unsigned char* grayLayer) const {
-    if (mChannels == 4) {
-        for (int i = 0; i < mWidth; i++) {
-            for (int j = 0; j < mHeight; j++) {
-                float redLinear = pow(getPixelValue(j, i, 0),
-                                       GAMMA_CORRECTION);
-                float greenLinear = pow(getPixelValue(j,i,1),
-                                         GAMMA_CORRECTION);
-                float blueLinear = pow(getPixelValue(j,i,2),
-                                        GAMMA_CORRECTION);
-
-                // Computes the luminance value
-                grayLayer[j * mWidth + i] =
-                        (unsigned char)((int)pow((0.299f * redLinear
-                                                  + 0.587f * greenLinear
-                                                  + 0.114f * blueLinear),
-                                                  1/GAMMA_CORRECTION));
-            }
-        }
-
-        return true;
-    } else {
-
-        return false;
-    }
-}
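The resize constructor and rgbToGrayScale above both boil down to simple per-pixel arithmetic: block averaging for the downscale, and a gamma-aware Rec.601 weighting for the luma conversion. Below is a compact sketch of the luma step with explicit [0,1] normalization added for clarity; that normalization is an assumption on my part, not the deleted code path, which applied the exponent to raw 0-255 values:

// Sketch only: gamma-aware RGB-to-gray conversion for one pixel.
// kGamma and the 0.299/0.587/0.114 weights mirror the deleted helper;
// the normalization to [0,1] and the rounding are illustrative choices.
#include <cmath>

unsigned char rgbToGrayPixel(unsigned char r, unsigned char g, unsigned char b) {
    const float kGamma = 2.2f;
    float rl = std::pow(r / 255.f, kGamma);   // undo display gamma
    float gl = std::pow(g / 255.f, kGamma);
    float bl = std::pow(b / 255.f, kGamma);
    float luma = 0.299f * rl + 0.587f * gl + 0.114f * bl;  // Rec.601 weights
    // Re-apply gamma and scale back to 8 bits.
    return static_cast<unsigned char>(std::pow(luma, 1.f / kGamma) * 255.f + 0.5f);
}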
diff --git a/apps/CtsVerifier/lib/colorchecker/vec2.cpp b/apps/CtsVerifier/lib/colorchecker/vec2.cpp
deleted file mode 100644
index 29736bb..0000000
--- a/apps/CtsVerifier/lib/colorchecker/vec2.cpp
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "Vec2"
-#include <utils/Log.h>
-#include <utils/Timers.h>
-
-#include "vec2.h"
diff --git a/apps/CtsVerifier/lib/colorchecker/vec3.cpp b/apps/CtsVerifier/lib/colorchecker/vec3.cpp
deleted file mode 100644
index ac16620..0000000
--- a/apps/CtsVerifier/lib/colorchecker/vec3.cpp
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "Vec3"
-#include <utils/Log.h>
-#include <utils/Timers.h>
-
-#include "vec3.h"
diff --git a/apps/CtsVerifier/lib/colorchecker/whitebalancetest.cpp b/apps/CtsVerifier/lib/colorchecker/whitebalancetest.cpp
deleted file mode 100644
index 6413a2b..0000000
--- a/apps/CtsVerifier/lib/colorchecker/whitebalancetest.cpp
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_NDEBUG 0
-
-#define LOG_TAG "WhiteBalanceTest"
-#include <utils/Log.h>
-#include <utils/Timers.h>
-#include <cmath>
-#include <string>
-
-#include "vec2.h"
-#include "vec3.h"
-#include "whitebalancetest.h"
-
-// White point in XYZ color space under 5200k illumination.
-const Vec3f kDaylightWhitePoint(0.9781f, 1.f, 0.9021f);
-
-// Processes the checker-color data collected under different white-balance
-// modes. Assuming the daylight CCT is set to 5200K, computes the CCT of the
-// other white-balance modes.
-void WhiteBalanceTest::processData() {
-    ALOGV("Start Processing White Balance Test Data!");
-
-    int numPatches = mCheckerColors.size();
-    ALOGV("Processing %d tests with %d patches", 2, numPatches);
-
-    std::vector<Vec3f> xyzColors(numPatches);
-    for (int j = 0; j < numPatches; ++j) {
-        Vec3f xyzCheckerColor = initializeFromRGB(mCheckerColors[j]);
-        xyzColors[j] = xyzCheckerColor;
-        ALOGV("XYZ coordinate is %f, %f, %f", xyzCheckerColor.r(),
-              xyzCheckerColor.g(), xyzCheckerColor.b());
-    }
-
-    Vec3f meanScale(0.f, 0.f, 0.f);
-
-    if (mMode == "daylight") {
-        mXyzColorsDaylight = xyzColors;
-        // For testing the auto white balance mode. Compute a CCT that would
-        // map the gray checkers to a white point.
-        for (int j = 1; j < numPatches; ++j) {
-            meanScale = meanScale +
-                    (mXyzColorsDaylight[j] / kDaylightWhitePoint);
-        }
-    } else {
-        for (int j = 1; j < numPatches; ++j) {
-            meanScale = meanScale + (mXyzColorsDaylight[j] / xyzColors[j]);
-        }
-    }
-
-    meanScale = meanScale / (numPatches - 1);
-    ALOGV("Scale: %f, %f, %f", meanScale.r(), meanScale.g(), meanScale.b());
-
-    Vec3f whitePoint;
-    whitePoint = meanScale * kDaylightWhitePoint;
-
-    ALOGV("White point is %f, %f, %f", whitePoint.r(),
-         whitePoint.g(), whitePoint.b());
-
-    mCorrelatedColorTemp = findCorrelatedColorTemp(whitePoint);
-    ALOGV("CCT is %d", mCorrelatedColorTemp);
-}
-
-// Given a white point, find the correlated color temperature.
-// Formula taken from the paper "Calculating Correlated Color Temperatures
-// Across the Entire Gamut of Daylight and Skylight Chromaticities" by Hernandez
-// Andres et al. in 1999. The numbers are fitting parameters.
-int WhiteBalanceTest::findCorrelatedColorTemp(const Vec3f &whitePoint) {
-    Vec2f chromaOfWhitePoint(
-        whitePoint.r() / (whitePoint.r() + whitePoint.g() + whitePoint.b()),
-        whitePoint.g() / (whitePoint.r() + whitePoint.g() + whitePoint.b()));
-
-    float n = (chromaOfWhitePoint.x() - 0.3366f)
-                / (chromaOfWhitePoint.y() - 0.1735f);
-    float y = -949.86315f + 6253.80338f * exp(-n / 0.92159f)
-               + 28.70599f * exp(-n / 0.20039f) + 0.00004f * exp(-n / 0.07125f);
-
-    return static_cast<int>(y);
-}
-
-// Converts an RGB pixel value to the XYZ color space.
-Vec3f WhiteBalanceTest::initializeFromRGB(const Vec3f &rgb) {
-    float linearRed = convertToLinear(rgb.r());
-    float linearGreen = convertToLinear(rgb.g());
-    float linearBlue = convertToLinear(rgb.b());
-
-    float x = 0.4124f * linearRed + 0.3576f * linearGreen +
-            0.1805f * linearBlue;
-    float y = 0.2126f * linearRed + 0.7152f * linearGreen +
-            0.0722f * linearBlue;
-    float z = 0.0193f * linearRed + 0.1192f * linearGreen +
-            0.9505f * linearBlue;
-
-    return Vec3f(x, y, z);
-}
-
-float WhiteBalanceTest::convertToLinear(float color) {
-    float norm = color/ 255.0f;
-    float linearColor;
-
-    // Convert from sRGB space to linear RGB value
-    if (norm > 0.04045f) {
-        linearColor = pow(((norm + 0.055f) / 1.055f), 2.4f);
-    } else {
-        linearColor = norm / 12.92f;
-    }
-
-    return linearColor;
-}
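The deleted white-balance helper chains three standard colorimetric steps: sRGB de-gamma, the sRGB-to-XYZ (D65) matrix, and the Hernandez-Andres et al. (1999) fit from chromaticity to correlated color temperature. Here is a self-contained sketch using the same published constants, with the per-checker averaging left out and the interfaces simplified:

// Sketch only: sRGB pixel -> linear RGB -> XYZ -> correlated color temperature.
// The matrix rows and fitting parameters are the standard sRGB/D65 and
// Hernandez-Andres values used by the deleted test; everything else is simplified.
#include <cmath>

float srgbToLinear(float channel255) {
    float c = channel255 / 255.f;
    return (c > 0.04045f) ? std::pow((c + 0.055f) / 1.055f, 2.4f) : c / 12.92f;
}

int correlatedColorTemp(float r255, float g255, float b255) {
    float r = srgbToLinear(r255), g = srgbToLinear(g255), b = srgbToLinear(b255);
    float X = 0.4124f * r + 0.3576f * g + 0.1805f * b;
    float Y = 0.2126f * r + 0.7152f * g + 0.0722f * b;
    float Z = 0.0193f * r + 0.1192f * g + 0.9505f * b;
    float x = X / (X + Y + Z);                  // chromaticity coordinates
    float y = Y / (X + Y + Z);
    float n = (x - 0.3366f) / (y - 0.1735f);
    float cct = -949.86315f + 6253.80338f * std::exp(-n / 0.92159f)
              + 28.70599f * std::exp(-n / 0.20039f)
              + 0.00004f * std::exp(-n / 0.07125f);
    return static_cast<int>(cct);
}

As a sanity check, feeding the test's daylight white point (0.9781, 1.0, 0.9021) directly into the chromaticity-plus-fit portion should land near 5200 K, which is the daylight reference the deleted test pins its other modes against.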
diff --git a/apps/CtsVerifier/libs/opencv-android.jar b/apps/CtsVerifier/libs/opencv-android.jar
new file mode 100644
index 0000000..1c13eee
--- /dev/null
+++ b/apps/CtsVerifier/libs/opencv-android.jar
Binary files differ
diff --git a/apps/CtsVerifier/libs/opencv-android_LICENSE b/apps/CtsVerifier/libs/opencv-android_LICENSE
new file mode 100644
index 0000000..5e32d88
--- /dev/null
+++ b/apps/CtsVerifier/libs/opencv-android_LICENSE
@@ -0,0 +1,33 @@
+By downloading, copying, installing or using the software you agree to this license.
+If you do not agree to this license, do not download, install,
+copy or use the software.
+
+
+                          License Agreement
+               For Open Source Computer Vision Library
+                       (3-clause BSD License)
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+  * Redistributions of source code must retain the above copyright notice,
+    this list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or other materials provided with the distribution.
+
+  * Neither the names of the copyright holders nor the names of the contributors
+    may be used to endorse or promote products derived from this software
+    without specific prior written permission.
+
+This software is provided by the copyright holders and contributors "as is" and
+any express or implied warranties, including, but not limited to, the implied
+warranties of merchantability and fitness for a particular purpose are disclaimed.
+In no event shall copyright holders or contributors be liable for any direct,
+indirect, incidental, special, exemplary, or consequential damages
+(including, but not limited to, procurement of substitute goods or services;
+loss of use, data, or profits; or business interruption) however caused
+and on any theory of liability, whether in contract, strict liability,
+or tort (including negligence or otherwise) arising in any way out of
+the use of this software, even if advised of the possibility of such damage.
diff --git a/apps/CtsVerifier/proguard.flags b/apps/CtsVerifier/proguard.flags
index ca4680f..5a2beb5 100644
--- a/apps/CtsVerifier/proguard.flags
+++ b/apps/CtsVerifier/proguard.flags
@@ -16,6 +16,14 @@
 
 -keepclasseswithmembers class * extends com.android.cts.verifier.location.LocationModeTestActivity
 
+# keep mockito methods
+-keep class org.mockito.** { *; }
+-keep interface org.mockito.** { *; }
+-keep class com.google.dexmaker.** { *; }
+-keep interface com.google.dexmaker.** { *; }
+
 -dontwarn android.hardware.Sensor
 -dontwarn android.test.AndroidTestRunner
 -dontwarn java.util.concurrent.ConcurrentLinkedDeque
+-dontwarn android.cts.util.**
+-dontwarn junit.**
diff --git a/apps/CtsVerifier/res/drawable/prompt_x.png b/apps/CtsVerifier/res/drawable/prompt_x.png
new file mode 100644
index 0000000..64302dc
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_x.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable/prompt_y.png b/apps/CtsVerifier/res/drawable/prompt_y.png
new file mode 100644
index 0000000..01926b5
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_y.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable/prompt_z.png b/apps/CtsVerifier/res/drawable/prompt_z.png
new file mode 100644
index 0000000..f4d86d6
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/prompt_z.png
Binary files differ
diff --git a/apps/CtsVerifier/res/drawable/stat_sys_managed_profile_status.xml b/apps/CtsVerifier/res/drawable/stat_sys_managed_profile_status.xml
new file mode 100644
index 0000000..b04059e
--- /dev/null
+++ b/apps/CtsVerifier/res/drawable/stat_sys_managed_profile_status.xml
@@ -0,0 +1,29 @@
+<!--
+Copyright (C) 2015 The Android Open Source Project
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+         http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+        android:width="23.0dp"
+        android:height="18.0dp"
+        android:viewportWidth="21.0"
+        android:viewportHeight="17.0">
+    <group android:translateX="2.0">
+        <path
+            android:fillColor="#FFFFFFFF"
+            android:pathData="M9.9,11.6H7v-1.1H2.1v2.8c0,0.8,0.6,1.4,1.4,1.4h9.9c0.8,0,1.4,-0.6,1.4,-1.4v-2.8H9.9V11.6z"/>
+        <path
+            android:fillColor="#FFFFFFFF"
+            android:pathData="M14.1,4.2h-2.5V3.2l-1.1,-1.1H6.3L5.3,3.2v1H2.8C2,4.2,1.4,4.9,1.4,5.6v2.8c0,0.8,0.6,1.4,1.4,1.4H7V8.8h2.8v1.1h4.2     c0.8,0,1.4,-0.6,1.4,-1.4V5.6C15.5,4.9,14.8,4.2,14.1,4.2z M10.6,4.2H6.3V3.2h4.2V4.2z"/>
+    </group>
+</vector>
diff --git a/apps/CtsVerifier/res/layout/audio_dev_notify.xml b/apps/CtsVerifier/res/layout/audio_dev_notify.xml
new file mode 100644
index 0000000..98dbd8b
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/audio_dev_notify.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:padding="10dip"
+        android:orientation="vertical">
+
+  <TextView
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:scrollbars="vertical"
+      android:gravity="bottom"
+      android:id="@+id/info_text"
+      android:text="@string/audio_devices_notification_instructions" />
+
+  <LinearLayout
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:orientation="vertical">
+      <Button
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+          android:id="@+id/audio_dev_notification_connect_clearmsgs_btn"
+          android:text="@string/audio_dev_notification_clearmsgs"/>
+
+      <TextView
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+          android:id="@+id/audio_dev_notification_connect_msg"/>
+
+      <TextView
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+          android:id="@+id/audio_dev_notification_disconnect_msg"/>
+
+    </LinearLayout>
+
+  <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/audio_frequency_line_activity.xml b/apps/CtsVerifier/res/layout/audio_frequency_line_activity.xml
new file mode 100644
index 0000000..69e3bc7
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/audio_frequency_line_activity.xml
@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:padding="10dip"
+        android:orientation="vertical">
+
+  <TextView
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:scrollbars="vertical"
+      android:gravity="bottom"
+      android:id="@+id/info_text"
+      android:text="@string/audio_frequency_line_instructions" />
+  <LinearLayout
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:orientation="vertical">
+      <Button
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+          android:id="@+id/audio_frequency_line_plug_ready_btn"
+          android:text="@string/audio_frequency_line_plug_ready_btn"/>
+
+    <LinearLayout
+        android:orientation="vertical"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent">
+
+     <LinearLayout
+          android:layout_width="wrap_content"
+          android:layout_height="wrap_content"
+          android:orientation="horizontal"
+          android:id="@+id/audio_frequency_line_layout">
+            <Button
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/audio_frequency_line_test_btn"
+                android:id="@+id/audio_frequency_line_test_btn"/>
+
+            <ProgressBar
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:id="@+id/audio_frequency_line_progress_bar"/>
+        </LinearLayout>
+
+        <TextView
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/audio_frequency_line_results_text"
+            android:id="@+id/audio_frequency_line_results_text"/>
+
+    </LinearLayout>
+    </LinearLayout>
+
+  <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/audio_frequency_mic_activity.xml b/apps/CtsVerifier/res/layout/audio_frequency_mic_activity.xml
new file mode 100644
index 0000000..10b0003
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/audio_frequency_mic_activity.xml
@@ -0,0 +1,133 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="match_parent"
+    android:layout_height="wrap_content"
+    android:padding="10dip"
+    android:orientation="vertical">
+
+    <ScrollView
+       android:layout_width="match_parent"
+       android:layout_height="match_parent"
+       android:id="@+id/scrollView">
+
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:orientation="vertical">
+
+        <LinearLayout
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:orientation="horizontal">
+          <TextView
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:layout_weight="1"
+              android:id="@+id/info_text"
+              android:text="@string/audio_frequency_mic_instructions"/>
+
+           <ProgressBar
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:layout_weight="1"
+                        android:id="@+id/audio_frequency_mic_progress_bar"/>
+         </LinearLayout>
+
+          <Button
+              android:layout_width="match_parent"
+              android:layout_height="wrap_content"
+              android:id="@+id/audio_frequency_mic_speakers_ready_btn"
+              android:text="@string/audio_frequency_mic_speakers_ready_btn"/>
+
+          <TextView
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:scrollbars="vertical"
+                android:gravity="bottom"
+                android:id="@+id/audio_frequency_mic_speakers_ready_status"
+                android:text="@string/audio_frequency_mic_speakers_ready_status"/>
+
+            <LinearLayout
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:orientation="vertical"
+                android:id="@+id/audio_frequency_mic_layout_test1">
+
+            <TextView
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/audio_frequency_mic_instructions2"
+                android:id="@+id/audio_frequency_mic_instructions2"/>
+
+              <Button
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/audio_frequency_mic_test1_btn"
+                    android:id="@+id/audio_frequency_mic_test1_btn"/>
+
+                <TextView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/audio_frequency_mic_results_text"
+                    android:id="@+id/audio_frequency_mic_results1_text"/>
+          </LinearLayout>
+
+          <LinearLayout
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:orientation="vertical"
+                android:id="@+id/audio_frequency_mic_layout_test2a">
+
+            <Button
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:id="@+id/audio_frequency_mic_mic_ready_btn"
+                android:text="@string/audio_frequency_mic_mic_ready_btn"/>
+
+            <TextView
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/audio_frequency_mic_usb_status"
+                android:id="@+id/audio_frequency_mic_usb_status"/>
+           </LinearLayout>
+
+          <LinearLayout
+              android:orientation="vertical"
+              android:layout_width="match_parent"
+              android:layout_height="match_parent"
+              android:id="@+id/audio_frequency_mic_layout_test2b">
+
+            <Button
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/audio_frequency_mic_test2_btn"
+                android:id="@+id/audio_frequency_mic_test2_btn"/>
+
+              <TextView
+                  android:layout_width="wrap_content"
+                  android:layout_height="wrap_content"
+                  android:text="@string/audio_frequency_mic_results_text"
+                  android:id="@+id/audio_frequency_mic_results_text"/>
+
+        </LinearLayout>
+
+        <include layout="@layout/pass_fail_buttons"/>
+        </LinearLayout>
+      </ScrollView>
+
+</LinearLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/audio_frequency_speaker_activity.xml b/apps/CtsVerifier/res/layout/audio_frequency_speaker_activity.xml
new file mode 100644
index 0000000..5dd55b1
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/audio_frequency_speaker_activity.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="match_parent"
+    android:layout_height="wrap_content"
+    android:padding="10dip"
+    android:orientation="vertical">
+
+    <ScrollView
+       android:layout_width="match_parent"
+       android:layout_height="match_parent"
+       android:id="@+id/scrollView">
+
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:orientation="vertical">
+
+        <TextView
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:scrollbars="vertical"
+            android:gravity="bottom"
+            android:id="@+id/info_text"
+            android:text="@string/audio_frequency_speaker_instructions"/>
+
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:orientation="vertical">
+            <Button
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:id="@+id/audio_frequency_speaker_mic_ready_btn"
+                android:text="@string/audio_frequency_speaker_mic_ready_btn"/>
+
+            <TextView
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:text="@string/audio_frequency_speaker_usb_status"
+                android:id="@+id/audio_frequency_speaker_usb_status"/>
+
+            <LinearLayout
+                android:orientation="vertical"
+                android:layout_width="match_parent"
+                android:layout_height="match_parent">
+
+                <LinearLayout
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:orientation="horizontal"
+                    android:id="@+id/audio_frequency_speaker_layout">
+                    <Button
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:text="@string/audio_frequency_speaker_test_btn"
+                        android:id="@+id/audio_frequency_speaker_test_btn"/>
+
+                    <ProgressBar
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content"
+                        android:id="@+id/audio_frequency_speaker_progress_bar"/>
+                </LinearLayout>
+
+                <TextView
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/audio_frequency_speaker_results_text"
+                    android:id="@+id/audio_frequency_speaker_results_text"/>
+
+            </LinearLayout>
+        </LinearLayout>
+
+        <include layout="@layout/pass_fail_buttons"/>
+        </LinearLayout>
+      </ScrollView>
+
+</LinearLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/audio_loopback_activity.xml b/apps/CtsVerifier/res/layout/audio_loopback_activity.xml
new file mode 100644
index 0000000..626ac4f
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/audio_loopback_activity.xml
@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:padding="10dip"
+        android:orientation="vertical">
+
+  <TextView
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:scrollbars="vertical"
+      android:gravity="bottom"
+      android:id="@+id/info_text"
+      android:text="@string/audio_loopback_instructions" />
+  <LinearLayout
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:orientation="vertical">
+      <Button
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+          android:id="@+id/audio_loopback_plug_ready_btn"
+          android:text="@string/audio_loopback_plug_ready_btn"/>
+
+    <LinearLayout
+        android:orientation="vertical"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:id="@+id/audio_loopback_layout">
+
+        <TextView
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/audio_loopback_instructions2"
+            android:id="@+id/audio_loopback_instructions2"/>
+
+        <SeekBar
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:id="@+id/audio_loopback_level_seekbar"/>
+
+        <TextView
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/audio_loopback_level_text"
+            android:id="@+id/audio_loopback_level_text"/>
+
+        <Button
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/audio_loopback_test_btn"
+            android:id="@+id/audio_loopback_test_btn"/>
+
+        <ProgressBar
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:id="@+id/audio_loopback_progress_bar"/>
+
+        <TextView
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/audio_loopback_results_text"
+            android:id="@+id/audio_loopback_results_text"/>
+    </LinearLayout>
+    </LinearLayout>
+
+  <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/audio_routingnotifications_test.xml b/apps/CtsVerifier/res/layout/audio_routingnotifications_test.xml
new file mode 100644
index 0000000..cef30d6
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/audio_routingnotifications_test.xml
@@ -0,0 +1,99 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:padding="10dip"
+        android:orientation="vertical">
+
+  <TextView
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:scrollbars="vertical"
+      android:gravity="bottom"
+      android:id="@+id/info_text"
+      android:text="@string/audio_dev_routingnotification_instructions" />
+
+  <LinearLayout
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:orientation="vertical"
+      android:id="@+id/audioTrackRoutingLayout">
+      <TextView
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+              android:text="@string/audio_routingnotification_playHeader"/>
+
+      <TextView
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+          android:id="@+id/audio_routingnotification_audioTrack_change"/>
+
+      <LinearLayout
+          android:layout_width="wrap_content"
+          android:layout_height="wrap_content"
+          android:orientation="horizontal">
+          <Button
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:id="@+id/audio_routingnotification_playBtn"
+              android:text="@string/audio_routingnotification_playBtn"/>
+
+          <Button
+              android:layout_width="match_parent"
+              android:layout_height="wrap_content"
+              android:id="@+id/audio_routingnotification_playStopBtn"
+              android:text="@string/audio_routingnotification_playStopBtn"/>
+      </LinearLayout>
+    </LinearLayout>
+
+  <LinearLayout
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:orientation="vertical"
+      android:id="@+id/audioRecordRoutingLayout">
+      <TextView
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+              android:text="@string/audio_routingnotification_recHeader"/>
+
+      <TextView
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+          android:id="@+id/audio_routingnotification_audioRecord_change"/>
+
+      <LinearLayout
+          android:layout_width="wrap_content"
+          android:layout_height="wrap_content"
+          android:orientation="horizontal">
+          <Button
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:id="@+id/audio_routingnotification_recordBtn"
+              android:text="@string/audio_routingnotification_recBtn"/>
+
+          <Button
+              android:layout_width="match_parent"
+              android:layout_height="wrap_content"
+              android:id="@+id/audio_routingnotification_recordStopBtn"
+              android:text="@string/audio_routingnotification_recStopBtn"/>
+      </LinearLayout>
+    </LinearLayout>
+
+  <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/ca_main.xml b/apps/CtsVerifier/res/layout/ca_main.xml
deleted file mode 100644
index 274430d..0000000
--- a/apps/CtsVerifier/res/layout/ca_main.xml
+++ /dev/null
@@ -1,81 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-
-<!-- Copyright (C) 2011 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<android.support.wearable.view.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
-    xmlns:app="http://schemas.android.com/apk/res-auto"
-    android:layout_width="match_parent"
-    android:layout_height="match_parent">
-    <LinearLayout app:layout_box="all"
-      android:orientation="vertical" android:layout_width="fill_parent"
-      android:layout_height="fill_parent">
-
-
-      <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
-        android:orientation="horizontal" android:layout_width="fill_parent"
-        android:layout_height="wrap_content">
-        <!--Button android:id="@+id/focusmodesbutton" android:layout_width="0px"
-          android:layout_height="wrap_content" android:text="@string/ca_focus_modes_label"
-          android:layout_weight="1" /-->
-        <Button android:id="@+id/findcheckerboardbutton" android:layout_width="0px"
-          android:layout_height="wrap_content" android:text="@string/ca_find_checkerboard_label"
-          android:layout_weight="1" />
-
-        <Button android:id="@+id/meteringbutton" android:layout_width="0px"
-          android:layout_height="wrap_content" android:text="@string/ca_metering_label"
-          android:layout_weight="1" />
-
-        <Button android:id="@+id/exposurecompensationbutton" android:layout_width="0px"
-          android:layout_height="wrap_content" android:text="@string/ca_exposure_test_label"
-          android:layout_weight="1"/>
-
-        <Button android:id="@+id/whitebalancebutton" android:layout_width="0px"
-          android:layout_height="wrap_content" android:text="@string/ca_wb_test_label"
-          android:layout_weight="1" />
-
-        <Button android:id="@+id/lockbutton" android:layout_width="0px"
-          android:layout_height="wrap_content" android:text="@string/ca_lock_test_label"
-          android:layout_weight="1" />
-      </LinearLayout>
-
-      <LinearLayout android:orientation="horizontal"
-        android:layout_width="fill_parent" android:layout_height="0px"
-        android:layout_weight="1">
-
-        <SurfaceView android:id="@+id/cameraview" android:layout_height="fill_parent"
-          android:layout_width="wrap_content"
-          android:layout_weight="0" />
-
-        <LinearLayout android:orientation="vertical"
-          android:layout_width="fill_parent" android:layout_height="match_parent"
-          android:layout_weight="1">
-
-           <ListView android:id="@+id/ca_tests"
-                android:layout_width="fill_parent"
-                android:layout_height="wrap_content"
-                android:layout_weight="1"
-                android:layout_marginLeft="10px"/>
-
-          <ImageView android:id="@+id/resultview" android:layout_height="wrap_content"
-            android:layout_width="fill_parent"
-            android:layout_weight="1" />
-        </LinearLayout>
-
-      </LinearLayout>
-
-      <include layout="@layout/pass_fail_buttons" />
-
-    </LinearLayout>
-</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/cam_preview_overlay.xml b/apps/CtsVerifier/res/layout/cam_preview_overlay.xml
new file mode 100644
index 0000000..41bbeb1
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/cam_preview_overlay.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="utf-8"?>
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical" android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:keepScreenOn="true">
+    <view
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        class="com.android.cts.verifier.sensors.RVCVCameraPreview"
+        android:id="@+id/cam_preview"
+        android:layout_centerVertical="true"
+        android:layout_centerHorizontal="true" />
+
+    <!--
+    <ImageView
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:layout_centerVertical="true"
+        android:id="@+id/cam_overlay"
+        android:src="@drawable/icon"
+        android:scaleType="fitStart"
+        />
+    -->
+    <view
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        class="com.android.cts.verifier.sensors.MotionIndicatorView"
+        android:id="@+id/cam_indicator"
+        android:layout_centerVertical="true"
+        android:layout_centerHorizontal="true" />
+
+    <ImageView
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_centerInParent="true"
+        android:id="@+id/cam_overlay"
+        android:scaleType="fitStart"
+        />
+</RelativeLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/camera_flashlight.xml b/apps/CtsVerifier/res/layout/camera_flashlight.xml
new file mode 100644
index 0000000..2d4378c
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/camera_flashlight.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:layout_gravity="bottom"
+    android:orientation="vertical">
+
+        <TextView
+            android:id="@+id/flash_instruction_text"
+            android:layout_width="match_parent"
+            android:layout_height="0dp"
+            android:layout_weight="2"
+            android:gravity="center"
+            android:text="@string/camera_flashlight_start_text" />
+
+        <Button
+            android:id="@+id/flash_instruction_button"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:layout_marginLeft="100dp"
+            android:layout_marginRight="100dp"
+            android:text="@string/camera_flashlight_start_button" />
+
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:layout_marginTop="10dp"
+            android:layout_marginLeft="50dp"
+            android:layout_marginRight="50dp"
+            android:layout_marginBottom="50dp"
+            android:orientation="horizontal">
+
+            <Button
+                android:id="@+id/flash_on_button"
+                android:layout_width="0dp"
+                android:layout_height="match_parent"
+                android:layout_weight="1"
+                android:text="@string/camera_flashlight_on_button" />
+
+            <Button
+                android:id="@+id/flash_off_button"
+                android:layout_width="0dp"
+                android:layout_height="match_parent"
+                android:layout_weight="1"
+                android:text="@string/camera_flashlight_off_button" />
+
+        </LinearLayout>
+
+    <include layout="@layout/pass_fail_buttons"/>
+
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/byod_custom_view.xml b/apps/CtsVerifier/res/layout/dialog_custom_view.xml
similarity index 100%
rename from apps/CtsVerifier/res/layout/byod_custom_view.xml
rename to apps/CtsVerifier/res/layout/dialog_custom_view.xml
diff --git a/apps/CtsVerifier/res/layout/hifi_ultrasound.xml b/apps/CtsVerifier/res/layout/hifi_ultrasound.xml
new file mode 100644
index 0000000..7d2de5a
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/hifi_ultrasound.xml
@@ -0,0 +1,51 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+              android:layout_width="match_parent"
+              android:layout_height="match_parent"
+              android:orientation="vertical">
+
+  <TextView
+      android:layout_width="match_parent"
+      android:layout_height="0dp"
+      android:layout_weight="7"
+      android:scrollbars="vertical"
+      android:gravity="bottom"
+      android:id="@+id/info_text"/>
+
+  <LinearLayout
+      android:layout_width="match_parent"
+      android:layout_height="0dp"
+      android:layout_weight="3"
+      android:orientation="horizontal">
+    <Button
+        android:layout_width="0dp"
+        android:layout_height="match_parent"
+        android:layout_weight="5"
+        android:text="@string/hifi_ultrasound_test_record"
+        android:id="@+id/recorder_button"/>
+    <Button
+        android:layout_width="0dp"
+        android:layout_height="match_parent"
+        android:layout_weight="5"
+        android:text="@string/hifi_ultrasound_test_play"
+        android:id="@+id/player_button"/>
+  </LinearLayout>
+
+  <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/hifi_ultrasound_popup.xml b/apps/CtsVerifier/res/layout/hifi_ultrasound_popup.xml
new file mode 100644
index 0000000..afff2c9
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/hifi_ultrasound_popup.xml
@@ -0,0 +1,33 @@
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+              android:layout_width="match_parent"
+              android:layout_height="match_parent"
+              android:background="@android:color/white"
+              android:gravity="center"
+              android:orientation="vertical" >
+
+    <com.androidplot.xy.XYPlot
+        android:id="@+id/responseChart"
+        android:layout_width="match_parent"
+        android:layout_height="0dp"
+        android:layout_weight="9"
+        androidPlot.domainLabel="kHz"
+        androidPlot.rangeLabel="dB"
+        androidPlot.domainLabelWidget.labelPaint.textSize="16dp"
+        androidPlot.rangeLabelWidget.labelPaint.textSize="16dp"
+        androidPlot.graphWidget.rangeLabelPaint.textSize="16dp"
+        androidPlot.graphWidget.rangeOriginLabelPaint.textSize="16dp"
+        androidPlot.graphWidget.domainLabelPaint.textSize="16dp"
+        androidPlot.graphWidget.domainOriginLabelPaint.textSize="16dp"
+        androidPlot.legendWidget.textPaint.textSize="16dp"
+        androidPlot.legendWidget.iconSizeMetrics.heightMetric.value="16dp"
+        androidPlot.legendWidget.iconSizeMetrics.widthMetric.value="16dp"
+        androidPlot.legendWidget.heightMetric.value="16dp"
+        androidPlot.graphWidget.gridLinePaint.color="#000000" />
+
+    <Button
+        android:id="@+id/dismiss"
+        android:layout_width="match_parent"
+        android:layout_height="0dp"
+        android:layout_weight="1"
+        android:text="@string/hifi_ultrasound_test_dismiss" />
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/hifi_ultrasound_popup_instru.xml b/apps/CtsVerifier/res/layout/hifi_ultrasound_popup_instru.xml
new file mode 100644
index 0000000..42af6e9
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/hifi_ultrasound_popup_instru.xml
@@ -0,0 +1,21 @@
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:gravity="center"
+              android:background="@android:color/black"
+              android:padding="5dp"
+              android:orientation="vertical" >
+
+  <TextView
+      android:id="@+id/instru"
+      android:layout_width="match_parent"
+      android:layout_height="match_parent"
+      android:layout_weight="5" />
+
+  <Button
+      android:id="@+id/ok"
+      android:layout_width="match_parent"
+      android:layout_height="match_parent"
+      android:text="@string/hifi_ultrasound_test_ok"
+      android:layout_weight="1" />
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/keychain_main.xml b/apps/CtsVerifier/res/layout/keychain_main.xml
new file mode 100644
index 0000000..01eb255
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/keychain_main.xml
@@ -0,0 +1,81 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+     Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<android.support.wearable.view.BoxInsetLayout
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+
+    <LinearLayout xmlns:app="http://schemas.android.com/apk/res-auto"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:orientation="vertical"
+        android:padding="10dip" >
+
+        <TextView
+            android:id="@+id/test_instruction"
+            style="@style/InstructionsFont"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:layout_weight="0" />
+
+        <TextView
+            android:id="@+id/test_log"
+            android:layout_width="match_parent"
+            android:layout_height="0dp"
+            android:layout_weight="1"
+            android:layout_gravity="bottom"
+            android:orientation="vertical" />
+
+        <LinearLayout
+            android:id="@+id/action_buttons"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:layout_weight="0"
+            android:orientation="horizontal">
+
+            <Button
+                android:id="@+id/action_reset"
+                android:text="@string/keychain_reset"
+                android:layout_weight="1"
+                android:layout_width="0dp"
+                android:layout_height="wrap_content" />
+
+            <Button
+                android:id="@+id/action_skip"
+                android:text="@string/keychain_skip"
+                android:layout_weight="1"
+                android:layout_width="0dp"
+                android:layout_height="wrap_content" />
+
+            <Button
+                android:id="@+id/action_next"
+                android:text="@string/next_button_text"
+                android:layout_weight="1"
+                android:layout_width="0dp"
+                android:layout_height="wrap_content" />
+
+        </LinearLayout>
+
+        <include
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:layout_weight="0"
+            layout="@layout/pass_fail_buttons" />
+
+    </LinearLayout>
+</android.support.wearable.view.BoxInsetLayout>
diff --git a/apps/CtsVerifier/res/layout/permission_lockdown.xml b/apps/CtsVerifier/res/layout/permission_lockdown.xml
new file mode 100644
index 0000000..ea295dd
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/permission_lockdown.xml
@@ -0,0 +1,110 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:id="@+id/permission_lockdown_activity"
+        android:orientation="vertical"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent">
+
+    <ScrollView
+            android:layout_width="match_parent"
+            android:layout_height="0dp"
+            android:layout_weight="1"
+            android:paddingTop="4dp">
+        <TextView
+                android:id="@+id/test_instructions"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/device_profile_owner_permission_lockdown_test_instructions"
+                android:textSize="18sp"
+                android:padding="10dp" />
+    </ScrollView>
+
+    <ScrollView
+            android:layout_width="match_parent"
+            android:layout_height="0dp"
+            android:layout_weight="1">
+        <LinearLayout
+                android:orientation="vertical"
+                android:layout_width="match_parent"
+                android:layout_height="match_parent"
+                android:layout_gravity="center_vertical"
+                android:paddingStart="10dp"
+                android:paddingEnd="10dp">
+
+            <LinearLayout
+                    android:orientation="horizontal"
+                    android:layout_width="match_parent"
+                    android:layout_height="wrap_content"
+                    android:paddingTop="4dp">
+                <ImageView
+                        android:id="@+id/package_icon"
+                        android:layout_width="48dp"
+                        android:layout_height="48dp"
+                        android:scaleType="centerInside"
+                        android:gravity="center" />
+                <TextView
+                        android:id="@+id/package_name"
+                        android:layout_width="wrap_content"
+                        android:layout_height="match_parent"
+                        android:gravity="center_vertical"
+                        android:paddingLeft="10dp"
+                        android:textSize="16sp"
+                        android:singleLine="true"
+                        android:ellipsize="end" />
+            </LinearLayout>
+
+            <TextView
+                    android:id="@+id/permission_name"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:textSize="16sp"
+                    android:paddingTop="4dp" />
+
+            <RadioGroup
+                    android:id="@+id/permission_group"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:orientation="vertical"
+                    android:paddingTop="4dp">
+                <RadioButton
+                        android:id="@+id/permission_allow"
+                        android:text="@string/permission_allow"
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content" />
+                <RadioButton
+                        android:id="@+id/permission_default"
+                        android:text="@string/permission_default"
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content" />
+                <RadioButton
+                        android:id="@+id/permission_deny"
+                        android:text="@string/permission_deny"
+                        android:layout_width="wrap_content"
+                        android:layout_height="wrap_content" />
+            </RadioGroup>
+
+            <Button
+                    android:id="@+id/open_settings"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:layout_gravity="center_horizontal"
+                    android:text="@string/open_settings_button_label"
+                    android:onClick="openSettings" />
+        </LinearLayout>
+    </ScrollView>
+
+</LinearLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/positive_device_owner.xml b/apps/CtsVerifier/res/layout/positive_device_owner.xml
new file mode 100644
index 0000000..f5d10e0
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/positive_device_owner.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:orientation="vertical"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        >
+
+    <ScrollView
+            android:layout_width="match_parent"
+            android:layout_height="320dp"
+            android:layout_weight="2">
+        <TextView
+                android:id="@+id/positive_device_owner_instructions"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:padding="10dip"
+                android:text="@string/device_owner_positive_tests_instructions"
+                android:textSize="18dip" />
+    </ScrollView>
+
+    <Button
+        android:id="@+id/set_device_owner_button"
+        android:layout_width="204dp"
+        android:layout_height="wrap_content"
+        android:text="@string/set_device_owner_button_label" />
+
+    <ListView
+        android:id="@+id/android:list"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:layout_weight="3" />
+
+    <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout/provisioning_byod.xml b/apps/CtsVerifier/res/layout/provisioning_byod.xml
index b1b75ba..375c3ab 100644
--- a/apps/CtsVerifier/res/layout/provisioning_byod.xml
+++ b/apps/CtsVerifier/res/layout/provisioning_byod.xml
@@ -24,19 +24,17 @@
             android:layout_height="320dp"
             android:layout_weight="2">
         <TextView
-                android:id="@+id/byod_instructions"
+                android:id="@+id/test_instructions"
                 android:layout_width="match_parent"
                 android:layout_height="wrap_content"
                 android:padding="10dip"
-                android:text="@string/provisioning_byod_instructions"
                 android:textSize="18dip" />
     </ScrollView>
 
     <Button
-        android:id="@+id/byod_start"
+        android:id="@+id/prepare_test_button"
         android:layout_width="204dp"
-        android:layout_height="wrap_content"
-        android:text="@string/provisioning_byod_start" />
+        android:layout_height="wrap_content" />
 
     <ListView
         android:id="@+id/android:list"
diff --git a/apps/CtsVerifier/res/layout/sec_screen_lock_keys_main.xml b/apps/CtsVerifier/res/layout/sec_screen_lock_keys_main.xml
new file mode 100644
index 0000000..af53335
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/sec_screen_lock_keys_main.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:padding="10dip"
+        >
+
+    <Button android:id="@+id/sec_start_test_button"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_centerInParent="true"
+            android:text="@string/sec_start_test"
+            />
+
+    <include android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:layout_alignParentBottom="true"
+            layout="@layout/pass_fail_buttons"
+            />
+
+</RelativeLayout>
+
diff --git a/apps/CtsVerifier/res/layout/wifi_lockdown.xml b/apps/CtsVerifier/res/layout/wifi_lockdown.xml
new file mode 100644
index 0000000..ae6ea0c
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/wifi_lockdown.xml
@@ -0,0 +1,77 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:orientation="vertical"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        >
+
+    <ScrollView
+            android:layout_width="match_parent"
+            android:layout_height="320dp"
+            android:layout_weight="2">
+        <TextView
+                android:id="@+id/device_owner_wifi_lockdown_info"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:padding="10dip"
+                android:text="@string/device_owner_wifi_lockdown_info"
+                android:textSize="18dip" />
+    </ScrollView>
+
+    <EditText
+        android:id="@+id/device_owner_wifi_ssid"
+        android:hint="(SSID)"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" />
+
+    <RadioGroup
+        android:id="@+id/device_owner_keyManagementMethods"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:orientation="horizontal">
+        <RadioButton
+            android:id="@+id/device_owner_keymgmnt_none"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/device_owner_wifi_key_management_none_button" />
+        <RadioButton
+            android:id="@+id/device_owner_keymgmnt_wpa"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/device_owner_wifi_key_management_wpa_button" />
+        <RadioButton
+            android:id="@+id/device_owner_keymgmnt_wep"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="@string/device_owner_wifi_key_management_wep_button" />
+    </RadioGroup>
+
+    <Button
+        android:id="@+id/create_wifi_config_button"
+        android:layout_width="204dp"
+        android:layout_height="wrap_content"
+        android:text="@string/create_wifi_config_button_label" />
+
+    <ListView
+        android:id="@+id/android:list"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:layout_weight="3" />
+
+    <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/raw/cacert.der b/apps/CtsVerifier/res/raw/cacert.der
new file mode 100644
index 0000000..3934e1b
--- /dev/null
+++ b/apps/CtsVerifier/res/raw/cacert.der
Binary files differ
diff --git a/apps/CtsVerifier/res/raw/next_axis.mp3 b/apps/CtsVerifier/res/raw/next_axis.mp3
new file mode 100644
index 0000000..0a3174d
--- /dev/null
+++ b/apps/CtsVerifier/res/raw/next_axis.mp3
Binary files differ
diff --git a/apps/CtsVerifier/res/raw/stereo_mono_white_noise_48.mp3 b/apps/CtsVerifier/res/raw/stereo_mono_white_noise_48.mp3
new file mode 100644
index 0000000..e877fc1
--- /dev/null
+++ b/apps/CtsVerifier/res/raw/stereo_mono_white_noise_48.mp3
Binary files differ
diff --git a/apps/CtsVerifier/res/raw/usercert.der b/apps/CtsVerifier/res/raw/usercert.der
new file mode 100644
index 0000000..cdfb8f7
--- /dev/null
+++ b/apps/CtsVerifier/res/raw/usercert.der
Binary files differ
diff --git a/apps/CtsVerifier/res/raw/userkey.der b/apps/CtsVerifier/res/raw/userkey.der
new file mode 100644
index 0000000..31f1f8c
--- /dev/null
+++ b/apps/CtsVerifier/res/raw/userkey.der
Binary files differ
diff --git a/apps/CtsVerifier/res/values/strings.xml b/apps/CtsVerifier/res/values/strings.xml
index 07f7654..c5469a7 100644
--- a/apps/CtsVerifier/res/values/strings.xml
+++ b/apps/CtsVerifier/res/values/strings.xml
@@ -22,6 +22,7 @@
     <string name="info_button_text">Info</string>
     <string name="fail_button_text">Fail</string>
     <string name="next_button_text">Next</string>
+    <string name="go_button_text">Go</string>
 
     <!-- Strings for TestListActivity -->
     <string name="test_category_audio">Audio</string>
@@ -43,6 +44,7 @@
     <string name="test_results_cleared">Test results cleared.</string>
     <string name="view">View</string>
     <string name="test_results_error">Couldn\'t create test results report.</string>
+    <string name="runtime_permissions_error">Cannot continue. Please grant runtime permissions</string>
     <string name="export">Export</string>
     <string name="no_storage">Cannot save report to external storage, see log for details.</string>
     <string name="report_saved">Report saved to: %s</string>
@@ -126,6 +128,24 @@
     <string name="da_lock_success">It appears the screen was locked successfully!</string>
     <string name="da_lock_error">It does not look like the screen was locked...</string>
 
+    <!-- Strings for lock bound keys test -->
+    <string name="sec_lock_bound_key_test">Lock Bound Keys Test</string>
+    <string name="sec_lock_bound_key_test_info">
+        This test ensures that Keystore cryptographic keys that are bound to lock screen authentication
+        are unusable without a recent enough authentication. You need to set up a screen lock in order to
+        complete this test. If available, this test should be run by using fingerprint authentication
+        as well as PIN/pattern/password authentication.
+    </string>
+    <string name="sec_fingerprint_bound_key_test">Fingerprint Bound Keys Test</string>
+    <string name="sec_fingerprint_bound_key_test_info">
+        This test ensures that Keystore cryptographic keys that are bound to fingerprint authentication
+        are unusable without authentication. You need to set up a fingerprint in order to
+        complete this test.
+    </string>
+    <string name="sec_fp_dialog_message">Authenticate now with fingerprint</string>
+    <string name="sec_fp_auth_failed">Authentication failed</string>
+    <string name="sec_start_test">Start Test</string>
+
     <!-- Strings for BluetoothActivity -->
     <string name="bluetooth_test">Bluetooth Test</string>
     <string name="bluetooth_test_info">The Bluetooth Control tests check whether or not the device
@@ -298,6 +318,55 @@
 
     <string name="empty"></string>
 
+    <!-- Strings for HifiUltrasoundTestActivity -->
+    <string name="hifi_ultrasound_test">Hifi Ultrasound Microphone Test</string>
+    <string name="hifi_ultrasound_test_info">
+        This is a test for near-ultrasound (18500Hz - 20000Hz) microphone response.\n
+        This test requires two devices.\n</string>
+    <string name="hifi_ultrasound_test_play">PLAY</string>
+    <string name="hifi_ultrasound_test_record">RECORD</string>
+    <string name="hifi_ultrasound_test_plot">PLOT</string>
+    <string name="hifi_ultrasound_test_dismiss">DISMISS</string>
+    <string name="hifi_ultrasound_test_ok">OK</string>
+    <string name="hifi_ultrasound_test_instruction1">
+        Open Hifi Ultrasound Microphone Test on the test device and the reference device.\n
+        Set the media volume of the reference device at 70% and hold it with one hand.\n
+        Hold the testing device with the other hand.\n
+        Press the RECORD button on the testing device, then the PLAY button on the reference device within one second.\n
+        After the test, report the result on the testing (recording) device.\n</string>
+    <string name="hifi_ultrasound_test_pass">PASS</string>
+    <string name="hifi_ultrasound_test_fail">FAIL</string>
+    <string name="hifi_ultrasound_test_default_false_string">false</string>
+    <string name="hifi_ultrasound_test_mic_no_support">
+        Device does not support near-ultrasound recording.\n
+        All new phones and tablets MUST support near-ultrasound recording.\n
+        Report FAIL if this is a new device, report PASS if this is an updating device.\n</string>
+    <string name="hifi_ultrasound_test_spkr_no_support">
+        Device does not support near-ultrasound playback.\n
+        If this is your reference device, please use a different reference device.\n</string>
+
+    <string name="hifi_ultrasound_speaker_test">Hifi Ultrasound Speaker Test</string>
+    <string name="hifi_ultrasound_speaker_test_info">
+        This is a test for near-ultrasound (18500Hz - 20000Hz) speaker response.\n
+        This test requires two devices.\n</string>
+    <string name="hifi_ultrasound_speaker_test_instruction1">
+        Open Hifi Ultrasound Speaker Test on the test device and the reference device.\n
+        Set the media volume of the testing device at 70% and hold it with one hand.\n
+        Hold the reference device with the other hand.\n
+        Press the RECORD button on the reference device, then the PLAY button on the testing device within one second.\n
+        After the test, report the result on the testing (playback) device.\n</string>
+    <string name="hifi_ultrasound_speaker_test_mic_no_support">
+        Device does not support near-ultrasound recording.\n
+        If this is your reference device, please use a different reference device.\n</string>
+    <string name="hifi_ultrasound_speaker_test_spkr_no_support">
+        Device does not support near-ultrasound playback.\n
+        All new phones and tablets MUST support near-ultrasound playback.\n
+        Report FAIL if this is a new device, report PASS if this is an updating device.\n</string>
+    <string name="hifi_ultrasound_speaker_test_test_side">
+        Please wait for the result on the reference device then report here.</string>
+    <string name="hifi_ultrasound_speaker_test_reference_side">
+        Please report on the testing device.\n</string>
+
     <!-- Strings for Location tests -->
     <string name="location_gps_test">GPS Test</string>
     <string name="location_gps_test_info">This test verifies basic GPS behavior
@@ -331,6 +400,7 @@
     <string name="nfc_pee_2_pee">Peer-to-Peer Data Exchange</string>
     <string name="nfc_ndef_push_sender">NDEF Push Sender</string>
     <string name="nfc_ndef_push_receiver">NDEF Push Receiver</string>
+    <string name="nfc_llcp_version_check">LLCP version check</string>
 
     <string name="nfc_tag_verification">Tag Verification</string>
     <string name="nfc_ndef">NDEF</string>
@@ -352,6 +422,13 @@
     <string name="nfc_ndef_push_receive_failure">Failed to receive the correct NDEF push
         message.</string>
 
+    <string name="nfc_llcp_version_check_info">This test requires two candidate devices
+       with NFC enabled to exchange P2P messages. Start the \"LLCP version check\" test on
+       the other candidate device also, and touch the devices back to back. This test
+       then verifies that the candidate device correctly reports LLCP
+    <string name="nfc_llcp_version_check_failure">The candidate devices does not report LLCP
+       version 1.2 or higher.</string>
+    <string name="nfc_llcp_version_check_success">The candidate device has a valid LLCP version.</string>
     <string name="nfc_tag_verifier">NFC Tag Verifier</string>
     <string name="nfc_tag_verifier_info">Follow the on-screen instructions to write and read
         a tag of the chosen technology.</string>
@@ -496,6 +573,8 @@
     <string name="snsr_test_skipped">SKIPPED</string>
     <string name="snsr_test_fail">FAIL</string>
     <string name="snsr_execution_time">Test execution time %1$s sec</string>
+    <string name="snsr_rvcvxchk_test">Rotation Vector CV XCheck</string>
+    <string name="snsr_rvcvxchk_test_rec">Rotation Vector CV XCheck Recording</string>
 
     <!-- Strings to interact with users in Sensor Tests -->
     <string name="snsr_test_play_sound">A sound will be played once the verification is complete...</string>
@@ -600,6 +679,14 @@
     <string name="snsr_step_counter_event">%1$d | Step Counter event. count=%2$d.</string>
     <string name="snsr_step_detector_event">%1$d | Step Detector event.</string>
 
+    <!-- Device suspend tests -->
+    <string name="snsr_device_suspend_test">Device Suspend Tests</string>
+    <string name="snsr_device_did_not_go_into_suspend">Device did not go into suspend mode during test execution </string>
+    <string name="snsr_batch_did_not_arrive_at_expected_time">Batch did not arrive at the expected time estimatedBatchArrivalMs=%1$d
+    firstEventReceivedMs=%2$d diffMs=%3$d toleranceMs=%4$d </string>
+    <string name="snsr_device_suspend_test_instr">One you begin the test, disconnect USB, turn off the display and allow
+    the device to go into suspend mode. The screen will turn on and a sound will be played once all the tests are completed.</string>
+
     <!-- Significant Motion -->
     <string name="snsr_significant_motion_test">Significant Motion Tests</string>
     <string name="snsr_significant_motion_event_arrival">Event expected to trigger. Triggered=%1$s.</string>
@@ -614,6 +701,9 @@
     <string name="snsr_significant_motion_test_deactivation">Once you begin the test, you will need to walk to ensure Significant Motion triggers only once.</string>
     <string name="snsr_significant_motion_registration">Expected to be able to register for TriggerSensor. Found=%1$b.</string>
     <string name="snsr_significant_motion_cancelation">Expected to be able to cancel TriggerSensor. Found=%b.</string>
+    <string name="snsr_significant_motion_ap_suspend">One you begin the test, disconnect USB, turn off the display and allow the device to go into suspend.
+    You will need to walk to ensure that Significant Motion triggers. The screen will turn on and a sound will be played once the test completes.</string>
+    <string name="snsr_device_did_not_wake_up_at_trigger">Device did not wakeup at tigger time. wakeTime=%1$d ms triggerTime=%2$d ms</string>
 
     <!-- Strings for Sensor CTS tests inside CtsVerifier -->
     <string name="snsr_single_sensor_tests">CTS Single Sensor Tests</string>
@@ -636,18 +726,6 @@
     <string name="congratulations">Congratulations!</string>
     <string name="no_suid_files">No unauthorized suid files detected!</string>
 
-    <!-- Strings for Camera Analyzer -->
-    <string name="camera_analyzer">Camera Analyzer</string>
-    <string name="ca_find_checkerboard_label">Find target</string>
-    <string name="ca_check_formats_label">Output formats</string>
-    <string name="ca_exposure_test_label">Exposure Comp.</string>
-    <string name="ca_result_label">Results will be here</string>
-    <string name="ca_wb_test_label">White Balance</string>
-    <string name="ca_lock_test_label">AE Lock</string>
-    <string name="ca_metering_label">Metering Area</string>
-    <string name="ca_focus_modes_label">Focus Modes</string>
-    <string name="ca_info">This test checks the image quality of the camera of this device. It requires a MacBeth 4x6 color checker. With an ADK board and a lamp connected to it on the Relay 1 port, all tests can be run automatically. Without the ADK board, all the tests except the Auto Exposure Lock Test can be run automatically and the Auto Exposure Lock Test will require users to turn on/off a lamp according to the instruction given. </string>
-
     <!-- Strings for Camera Orientation -->
     <string name="camera_orientation">Camera Orientation</string>
     <string name="co_info">This test verifies the orientation capabilities of
@@ -815,6 +893,25 @@
     <string name="its_test_passed">All Camera ITS tests passed.  Pass button enabled!</string>
     <string name="its_test_failed">Some Camera ITS tests failed.</string>
 
+    <!-- Strings for the Camera Flashlight test activity -->
+    <string name="camera_flashlight_test">Camera Flashlight</string>
+    <string name="camera_flashlight_info">
+        This test checks the flashlight functionality. It will turn on and off the flashlight of
+        each camera device that has a flash unit. Follow the instructions on screen and observe the
+        flashlight status changing.
+    </string>
+    <string name="camera_flashlight_start_button">Start</string>
+    <string name="camera_flashlight_next_button">Next</string>
+    <string name="camera_flashlight_done_button">Done</string>
+    <string name="camera_flashlight_on_button">On</string>
+    <string name="camera_flashlight_off_button">Off</string>
+    <string name="camera_flashlight_start_text">Press Start to start flashlight test.</string>
+    <string name="camera_flashlight_question_text">Is Camera %1$s flashlight on or off?</string>
+    <string name="camera_flashlight_next_text">Ok. Press next.</string>
+    <string name="camera_flashlight_failed_text">Test failed. Press Done or Fail button.</string>
+    <string name="camera_flashlight_passed_text">All tests passed. Press Done or Pass button.
+    </string>
+
     <!-- Strings for StreamingVideoActivity -->
     <string name="streaming_video">Streaming Video Quality Verifier</string>
     <string name="streaming_video_info">This is a test for assessing the quality of streaming videos.  Play each stream and verify that the video is smooth and in sync with the audio, and that there are no quality problems.</string>
@@ -1001,13 +1098,12 @@
         respecting user preferences about notification ranking and filtering.
     </string>
     <string name="attention_ready">I\'m done</string>
-    <string name="attention_filter_all">Please set the notification filter to \"All\" in the dialog
-        that appears when you change the device\'s volume.</string>
-    <string name="attention_filter_priority">Please set the notification filter to \"Priority\" in
-        the dialog that appears when you change the device\'s volume, and allow messages from
-        starred contacts.</string>
-    <string name="attention_filter_none">Please set the notification filter to \"None\" in the dialog
-        that appears when you change the device\'s volume.</string>
+    <string name="attention_filter_all">Please disable \"Do not disturb\" by tapping the Quick Settings tile.</string>
+    <string name="attention_filter_priority">Please select \"Priority only\" in the dialog that appears
+        when you tap the \"Do not disturb\" tile in Quick Settings, and customize the setting to allow messages from
+        starred contacts only by tapping \"More settings\".</string>
+    <string name="attention_filter_none">Please select \"Total silence\" in the dialog that appears
+        when you tap the \"Do not disturb\" tile in Quick Settings.</string>
     <string name="attention_create_contacts">Create contacts for notification annotations.</string>
     <string name="attention_delete_contacts">Delete test contacts.</string>
     <string name="attention_default_order">Check that ranker defaults to time order.</string>
@@ -1109,6 +1205,23 @@
     <string name="caboot_reboot_desc">Please reboot the device and return to this test.</string>
     <string name="caboot_after_boot">AFTER REBOOTING: Check that there is a notification that the network may be monitored. Opening that notification should show a dialog box giving more information, with a button to check trusted credentials. This should open up the same view of trusted credentials that you get via the "Check credentials" button.</string>
 
+    <!-- Strings for KeyChain -->
+    <string name="keychain_test">KeyChain Storage Test</string>
+    <string name="keychain_info">This test checks that credentials installed to the system can be granted, retrieved, and used to create valid HTTPS connections.</string>
+    <string name="keychain_reset">Reset</string>
+    <string name="keychain_skip">Skip</string>
+    <string name="keychain_setup_desc">The first step sets up an internal KeyStore and generates credentials to use for the remainder of the test.\n\n
+ Touch \'Next\' to begin.</string>
+    <string name="keychain_install_desc">Credentials generated. Touch \'Next\' to install them to the system keychain.\n\n
+The container for the credentials will not be protected with a password; if prompted for one, leave that field blank.\n\n
+During installation you may be prompted for a name - accept the default suggestion.\n\n
+In the case that these credentials were already installed, you may skip this step.</string>
+    <string name="keychain_https_desc">The last test involves setting up an HTTPS connection using credentials from the KeyChain.\n\n
+You should be prompted to select credentials; choose the ones you just installed in the previous step.</string>
+    <string name="keychain_reset_desc">Before marking this test as passed, touch \'Next\' to open security settings and reset the following items:\n
+ 1. Clear device credentials.\n
+ 2. Change the lock screen type to \'None\'.</string>
+
     <!-- Strings for Widget -->
     <string name="widget_framework_test">Widget Framework Test</string>
     <string name="widget_framework_test_info">This test checks some basic features of the widget
@@ -1293,9 +1406,11 @@
     <string name="snsr_rotation_vector_set_final">Place the device back to the reference position.</string>
     <string name="snsr_rotation_vector_verification">Angular deviation [%1$4.1f %2$4.1f %3$4.1f]. Current: %4$f deg. Max tolerated: %5$f.</string>
 
+    <!-- Strings common for BYOD and DO managed provisioning tests. -->
+    <string name="afw_device_admin">CTS Verifier - AfW Admin</string>
+
     <!-- Strings for BYOD managed provisioning tests (ByodFlowTestActivity) -->
     <string name="test_category_managed_provisioning">Managed Provisioning</string>
-    <string name="provisioning_byod_device_admin">CTS Verifier - BYOD Admin</string>
     <string name="provisioning_byod">BYOD Managed Provisioning</string>
     <string name="provisioning_byod_info">
         This test exercises the BYOD managed provisioning flow.
@@ -1334,6 +1449,42 @@
         \n
         Verify that you are prompted with the above choices and both options work as intended. Then mark this test accordingly.
     </string>
+    <string name="provisioning_byod_keyguard_disabled_features">Keyguard disabled features</string>
+    <string name="provisioning_byod_keyguard_disabled_features_info">
+        This test exercises Keyguard Disabled Features. Follow instructions above.
+    </string>
+    <string name="provisioning_byod_keyguard_disabled_features_instruction">
+        Please press the \"Prepare test\" button to disable trust agents.\n
+        Then please press through the following verification steps.\n
+        Note: The device password will be set to \"testpassword\". After leaving this screen, the device password will be cleared.
+    </string>
+    <string name="provisioning_byod_keyguard_disabled_features_prepare_button">Prepare test</string>
+    <string name="provisioning_byod_disable_trust_agents">Disable trust agents</string>
+    <string name="provisioning_byod_disable_trust_agents_instruction">
+        Please press the Go button to go to Settings > Security. Then go to Trusted agents and\n
+        check if the agents are shown as disabled by the administrator.
+        Then please press Back and mark the test as \"Pass\" or \"Fail\".
+    </string>
+    <string name="provisioning_byod_fingerprint_disabled_in_settings">Fingerprint is disabled in Settings</string>
+    <string name="provisioning_byod_fingerprint_disabled_in_settings_instruction">
+        Please press the Go button to go to Settings > Security. Then go to Fingerprint and\n
+        check if the screen is shown as disabled by the administrator.
+        Then please press Back and mark the test as \"Pass\" or \"Fail\".
+    </string>
+    <string name="provisioning_byod_disable_fingerprint">Fingerprint disabled on keyguard</string>
+    <string name="provisioning_byod_disable_fingerprint_instruction">
+        Please press the Go button to lock the screen. Then try to log in using the fingerprint reader.\n
+        Expected result is you cannot log in using your fingerprint.\n
+        After you log back in, please navigate back to CtsVerifier and mark the test as \"Pass\" or \"Fail\".
+    </string>
+    <string name="provisioning_byod_disable_notifications">Notifications disabled on keyguard</string>
+    <string name="provisioning_byod_disable_notifications_instruction">
+        Please press the Go button to lock the screen. Wait a couple of seconds and look out for a
+        notification from CtsVerifier.\n
+        The expected result is that the notification is shown as \"Contents hidden\" and you cannot see the contents
+        (which would read \"This is a work notification\").\n
+        After you log back in, please navigate back to CtsVerifier and mark the test as \"Pass\" or \"Fail\".
+    </string>
     <string name="provisioning_byod_work_notification">Work notification is badged</string>
     <string name="provisioning_byod_work_notification_instruction">
         Please press the Go button to trigger a notification.\n
@@ -1341,6 +1492,32 @@
         Verify that the notification is badged (see sample badge below). Then mark this test accordingly.
     </string>
     <string name="provisioning_byod_work_notification_title">This is a work notification</string>
+    <string name="provisioning_byod_work_status_icon">Work status icon is displayed</string>
+    <string name="provisioning_byod_work_status_icon_instruction">
+        Verify that the current status bar does not have a work status icon (see sample icon below).
+        \n\n
+        Please press the Go button to launch a work activity.
+        \n\n
+        Verify that the status bar now has a work status icon. Then mark this test accordingly.
+    </string>
+    <string name="provisioning_byod_work_status_icon_activity">
+        Verify that the current status bar has a work status notification.
+        \n\n
+        Please press finish to return to the tests and then mark this test accordingly.
+    </string>
+    <string name="provisioning_byod_work_status_toast">Work status toast is displayed</string>
+    <string name="provisioning_byod_work_status_toast_instruction">
+        Please press the Go button to launch a work activity.
+        \n\n
+        Follow instructions and then return and mark this test accordingly.
+    </string>
+    <string name="provisioning_byod_work_status_toast_activity">
+        Turn off the screen and wait a few seconds then turn on the screen again.
+        \n\n
+        Verify that a toast was displayed saying you are in the work profile.
+        \n\n
+        Please press finish to return to the tests and then mark this test accordingly.
+    </string>
     <string name="provisioning_byod_profile_visible_instruction">
         Please press the Go button to open the Settings page.
         Navigate to Accounts and confirm that:\n
@@ -1355,7 +1532,7 @@
         Navigate to Device administrators and confirm that:\n
         \n
         - Both Personal and Work categories exist.\n
-        - \"CTS Verifier - BYOD Admin\" exists under the Work category, and is activated.\n
+        - \"CTS Verifier - AfW Admin\" exists under the Work category, and is activated.\n
         \n
         Use the Back button to return to this page.
     </string>
@@ -1388,6 +1565,29 @@
         Then use the Back button to return to this test and mark accordingly.
     </string>
 
+    <string name="provisioning_byod_battery_settings">Profile-aware battery settings</string>
+    <string name="provisioning_byod_battery_settings_instruction">
+        Please press the Go button to open Battery page in settings.\n
+        \n
+        Verify that Battery page shows both badged and unbadged apps in the usage list.\n
+        \n
+        Note that the usage list only displays usage since last charge,
+        so you may need to unplug your device and use a badged and unbadged app
+        for a little while before they will appear in the list.\n
+        \n
+        Then use the Back button to return to this test and mark accordingly.
+    </string>
+
+    <string name="provisioning_byod_data_usage_settings">Profile-aware data usage settings</string>
+    <string name="provisioning_byod_data_usage_settings_instruction">
+        Please press the Go button to open the Settings page.\n
+        \n
+        Navigate to Data usage page and confirm that it includes a Work profile section,
+        and that tapping it shows just work profile data usage.\n
+        \n
+        Then use the Back button to return to this test and mark accordingly.
+    </string>
+
     <string name="provisioning_byod_cred_settings">Profile-aware trusted credential settings</string>
     <string name="provisioning_byod_cred_settings_instruction">
         Please press the Go button to open the Security settings.
@@ -1409,6 +1609,8 @@
         Then use the Back button to return to this test and mark accordingly.
     </string>
 
+    <string name="provisioning_byod_cross_profile_intent_filters">Cross profile intent filters are set</string>
+
     <string name="provisioning_byod_nfc_beam">Disable Nfc beam</string>
     <string name="provisioning_byod_nfc_beam_allowed_instruction">
         Please press the Go button to test if Nfc beam can be triggered in the work profile.\n
@@ -1434,7 +1636,6 @@
     <string name="provisioning_byod_delete_profile">Initiate deletion of work profile.</string>
     <string name="provisioning_byod_profile_deleted">Work profile deleted.</string>
     <string name="provisioning_byod_disabled">Device provisioning is not enabled.</string>
-    <string name="provisioning_byod_go">Go</string>
     <string name="provisioning_button_finish">Finish</string>
     <string name="provisioning_cross_profile_chooser">Choose an app to complete action</string>
 
@@ -1446,6 +1647,165 @@
     <string name="device_owner_negative_test">Device owner negative test</string>
     <string name="device_owner_negative_test_info">Please click the "Start provisioning" button, and when you see a warning dialog telling the device is already set up, select "pass". Otherwise, select "fail".</string>
     <string name="start_device_owner_provisioning_button">Start provisioning</string>
+    <string name="positive_device_owner">Device Owner Tests</string>
+    <string name="device_owner_positive_tests">Device Owner positive tests</string>
+    <string name="device_owner_positive_tests_instructions">
+            The positive device owner tests verify policies on a corporate owned device.\n
+            Press the button below first, follow the steps described in the dialog that pops up,
+            then proceed to the test cases.\n
+            Pressing \'back\', \'pass\' or \'fail\' on this test page will remove the device owner.\n
+            Alternatively, you can run the \'Remove device owner\' test. Ideally, that test should
+            be run last so that it does not interfere with other tests.
+    </string>
+    <string name="device_owner_positive_tests_info">
+            The positive device owner tests verify policies on a corporate owned device.\n
+            Press the button below first, follow the steps described in the dialog that pops up,
+            then proceed to the test cases.\n
+            Pressing \'back\', \'pass\' or \'fail\' on this test page will remove the device owner.\n
+            Alternatively, you can run the \'Remove device owner\' test. Ideally, that test should
+            be run last so that it does not interfere with other tests.
+    </string>
+    <string name="device_owner_positive_category">Device Owner Tests</string>
+    <string name="set_device_owner_button_label">Set up device owner</string>
+    <string name="set_device_owner_dialog_title">Set up device owner</string>
+    <string name="set_device_owner_dialog_text">
+            Please set the device owner by enabling USB debugging on the device and issuing the following command on the host:\n
+            adb shell dpm set-device-owner \'com.android.cts.verifier/com.android.cts.verifier.managedprovisioning.DeviceAdminTestReceiver\'
+    </string>
+    <string name="device_owner_remove_device_owner_test">Remove device owner</string>
+    <string name="device_owner_remove_device_owner_test_info">
+            Please check in Settings &gt; Security &gt; Device Administrators if CTSVerifier is
+            Device Owner. Then press the button below, and check that CTSVerifier is NOT Device
+            Owner anymore.
+    </string>
+    <string name="remove_device_owner_button">Remove device owner</string>
+    <string name="device_owner_check_device_owner_test">Check device owner</string>
+    <string name="device_owner_incorrect_device_owner">Missing or incorrect device owner: CTSVerifier is not DO!</string>
+    <string name="device_owner_wifi_lockdown_test">WiFi configuration lockdown</string>
+    <string name="device_owner_wifi_lockdown_info">
+            Please enter the SSID and auth method of an available WiFi Access Point and press the button to create a
+            WiFi configuration. This configuration can be seen in Settings &gt; WiFi. The test cases
+            are going to use this config. Please go through the test cases in order (from top to bottom).
+    </string>
+    <string name="switch_wifi_lockdown_off_button">WiFi config lockdown off</string>
+    <string name="switch_wifi_lockdown_on_button">WiFi config lockdown on</string>
+    <string name="wifi_lockdown_go_settings_wifi_button">Go to WiFi Settings</string>
+    <string name="device_owner_wifi_key_management_none_button">None</string>
+    <string name="device_owner_wifi_key_management_wpa_button">WPA</string>
+    <string name="device_owner_wifi_key_management_wep_button">WEP</string>
+    <string name="create_wifi_config_button_label">Create WiFi configuration</string>
+    <string name="wifi_lockdown_add_network_failed_dialog_title">WiFi configuration could not be created</string>
+    <string name="wifi_lockdown_add_network_failed_dialog_text">
+            There was an error during creation of WiFi configuration. Check if WiFi is switched on.
+    </string>
+    <string name="device_owner_wifi_config_unlocked_modification_test">Unlocked config is modifiable in Settings</string>
+    <string name="device_owner_wifi_config_unlocked_modification_test_info">
+            Please press the button to ensure WiFi config lockdown is NOT in effect. Then go to
+            Settings &gt; WiFi and see if the CTSVerifier created WiFi configuration can be edited.
+            Please make sure you can connect to it. The test is successful if the config is editable
+            and can be connected to.
+    </string>
+    <string name="device_owner_wifi_config_locked_modification_test">Locked config is not modifiable in Settings</string>
+    <string name="device_owner_wifi_config_locked_modification_test_info">
+            Please press the button to ensure WiFi config lockdown is in effect. Then go to
+            Settings &gt; WiFi and see if the CTSVerifier created WiFi configuration can NOT be edited
+            or removed. The test is successful if the config is NOT modifiable.
+    </string>
+    <string name="device_owner_wifi_config_locked_connection_test">Locked config can be connected to</string>
+    <string name="device_owner_wifi_config_locked_connection_test_info">
+            Please press the button to ensure WiFi config lockdown is in effect. Then go to
+            Settings &gt; WiFi and see if the CTSVerifier created WiFi configuration can be connected
+            to manually. The test is successful if the connection can be established.
+    </string>
+    <string name="device_owner_wifi_config_unlocked_removal_test">Unlocked config can be forgotten in Settings</string>
+    <string name="device_owner_wifi_config_unlocked_removal_test_info">
+            Please press the button to ensure WiFi config lockdown is NOT in effect. Then go to
+            Settings &gt; WiFi and see if the CTSVerifier created WiFi configuration can be forgotten.
+            The test is successful if the config could be forgotten and is removed from the list of saved configs.
+    </string>
+    <string name="device_owner_disable_statusbar_test">Disable status bar</string>
+    <string name="device_owner_disable_statusbar_test_info">
+            Please press the button below to disable the status bar and verify that quick settings, notifications
+            and the assist gesture are no longer available.\n
+            Next, press the button to reenable the status bar and verify that quick settings, notifications
+            and the assist gesture are available again.\n
+            Please mark the test accordingly.
+    </string>
+    <string name="device_owner_disable_statusbar_button">Disable status bar</string>
+    <string name="device_owner_reenable_statusbar_button">Reenable status bar</string>
+    <string name="device_owner_disable_keyguard_test">Disable keyguard</string>
+    <string name="device_owner_disable_keyguard_test_info">
+            Note that any device passwords that you might have set will be deleted during this test.\n
+            Please press the button below to disable the keyguard. Press the power button on your device to
+            switch off the screen. Then press the power button to switch the screen back on and verify that
+            no keyguard was shown.\n
+            Next, press the button to reenable the keyguard and repeat the above steps, this time verifying that
+            a keyguard was shown again.\n
+            Please mark the test accordingly.
+    </string>
+    <string name="device_owner_disable_keyguard_button">Disable keyguard</string>
+    <string name="device_owner_reenable_keyguard_button">Reenable keyguard</string>
+    <string name="device_profile_owner_permission_lockdown_test">Permissions lockdown</string>
+    <string name="device_profile_owner_permission_lockdown_test_instructions">
+            Select each of the three grant states for the permission shown below in turn.\n
+            Now open application settings, select Permissions, and verify that the following behaviour is observed.\n
+            <b>Allow:</b> Permission is granted to the app and cannot be changed through the settings UI.\n
+            <b>Let user decide:</b> Permission state can be changed through the settings UI.\n
+            <b>Deny:</b> Permission is denied to the app and cannot be changed through the settings UI.\n
+            Please mark the test accordingly.
+    </string>
+    <string name="device_owner_permission_lockdown_test_info">
+        This test checks if the permissions state in settings UI is locked down according to the state set by the device owner.
+    </string>
+    <string name="profile_owner_permission_lockdown_test_info">
+        <b>
+        Before proceeding, check if com.android.cts.permissionapp (aka CtsPermissionApp) is installed in the work profile by going to Settings &gt; Apps. If not, please install the app before proceeding.\n\n
+        </b>
+        This test checks if the permissions state in settings UI is locked down correctly depending on the state set by the profile owner.
+    </string>
+    <string name="package_not_found">You must install %s (aka CtsPermissionApp).</string>
+    <string name="permission_allow">Grant</string>
+    <string name="permission_default">Let user decide</string>
+    <string name="permission_deny">Deny</string>
+    <string name="not_profile_owner">%s is not profile owner.</string>
+    <string name="not_device_owner">%s is not device owner.</string>
+    <string name="activity_not_found">No activity found to handle intent: %s</string>
+    <string name="open_settings_button_label">Open Application Settings</string>
+    <string name="finish_button_label">Finish</string>
+    <string name="device_owner_device_admin_visible">Device administrator settings</string>
+    <string name="device_owner_device_admin_visible_info">
+        Please press the Go button to open the Security page in Settings.
+        Navigate to Device administrators and confirm that:\n
+        \n
+        - \"CTS Verifier - AfW Admin\" exists and is activated.\n
+        - \"CTS Verifier - AfW Admin\" cannot be disabled.\n
+        \n
+        Use the Back button to return to this page.
+    </string>
+    <string name="device_owner_disallow_config_bt">Disallow configuring Bluetooth</string>
+    <string name="device_owner_disallow_config_bt_info">
+        Please press the Set restriction button to set the user restriction.
+        Then press Go to open the Bluetooth page in Settings.
+        Confirm that:\n
+        \n
+        - You cannot view Bluetooth devices in range.\n
+        - You cannot edit, add or remove any already paired devices.\n
+        \n
+        Use the Back button to return to this page.
+    </string>
+    <string name="device_owner_disallow_config_wifi">Disallow configuring WiFi</string>
+    <string name="device_owner_disallow_config_wifi_info">
+        Please press the Set restriction button to set the user restriction.
+        Then press Go to open the WiFi page in Settings.
+        Confirm that:\n
+        \n
+        - You cannot view WiFi networks in range.\n
+        - You cannot edit, add or remove any existing WiFi configs.\n
+        \n
+        Use the Back button to return to this page.
+    </string>
+    <string name="device_owner_user_restriction_set">Set restriction</string>
+    <string name="device_owner_settings_go">Go</string>
 
     <!-- Strings for JobScheduler Tests -->
     <string name="js_test_description">This test is mostly automated, but requires some user interaction. You can pass this test once the list items below are checked.</string>
@@ -1561,6 +1921,41 @@
     The Spanish audio track should be selected.
     </string>
 
+    <string name="tv_time_shift_test">TV app time shift test</string>
+    <string name="tv_time_shift_test_info">
+    This test verifies that the TV app invokes the proper time shift APIs in the framework.
+    </string>
+    <string name="tv_time_shift_test_pause_resume">
+    Press the \"Launch TV app\" button. Verify that the playback control is available.
+    Pause the playback and then resume it.
+    </string>
+    <string name="tv_time_shift_test_verify_resume_after_pause">
+    The playback should be resumed after pause.
+    </string>
+    <string name="tv_time_shift_test_verify_position_tracking">
+    The playback position tracking should be activated.
+    </string>
+    <string name="tv_time_shift_test_speed_rate">
+    Press the \"Launch TV app\" button. Verify that the playback control is available.
+    Rewind the playback and in a few seconds fast-forward it.
+    </string>
+    <string name="tv_time_shift_test_verify_rewind">
+    The playback should be rewound.
+    </string>
+    <string name="tv_time_shift_test_verify_fast_forward">
+    The playback should be fast-forwarded.
+    </string>
+    <string name="tv_time_shift_test_seek">
+    Press the \"Launch TV app\" button. Verify that the playback control is available.
+    Seek to previous and then seek to next.
+    </string>
+    <string name="tv_time_shift_test_verify_seek_to_previous">
+    The playback position should be moved to the previous position.
+    </string>
+    <string name="tv_time_shift_test_verify_seek_to_next">
+    The playback position should be moved to the next position.
+    </string>
+
     <string name="overlay_view_text">Overlay View Dummy Text</string>
     <string name="fake_rating">Fake</string>
 
@@ -1580,4 +1975,116 @@
     <string name="error_screen_pinning_did_not_start">Screen was not pinned.</string>
     <string name="error_screen_pinning_did_not_exit">Screen was not unpinned.</string>
     <string name="error_screen_pinning_couldnt_exit">Could not exit screen pinning through API.</string>
+
+    <!--  Audio Devices Notifications Test -->
+    <string name="audio_devices_notifications_test">Audio Devices Notifications Test</string>
+    <string name="audio_devices_notification_instructions">
+          Click the "Clear Messages" button then connect and disconnect a wired headset.
+          Note if the appropriate notification messages appear below.
+    </string>
+    <string name="audio_dev_notification_clearmsgs">Clear Messages</string>
+    <string name="audio_dev_notification_connectMsg">CONNECT DETECTED</string>
+    <string name="audio_dev_notification_disconnectMsg">DISCONNECT DETECTED</string>
+
+    <!--  Audio Routing Notifications Test -->
+    <string name="audio_routingnotifications_test">Audio Routing Notifications Test</string>
+    <string name="audio_dev_routingnotification_instructions">
+          Click on the "Play" button in the AudioTrack Routing Notifictions section below to
+          start (silent) playback. Insert a wired headset. Observe a message acknowledging the
+          rerouting event below. Remove the wired headset and observe the new routing message.
+          Click on the "Stop" button to stop playback.\n
+          Repeat the process with the "Record" and "Stop" buttons in the AudioRecord Routing
+          Notifications section below.
+    </string>
+    <string name="audio_routingnotification_playBtn">Play</string>
+    <string name="audio_routingnotification_playStopBtn">Stop</string>
+    <string name="audio_routingnotification_recBtn">Record</string>
+    <string name="audio_routingnotification_recStopBtn">Stop</string>
+    <string name="audio_routingnotification_playHeader">AudioTrack Routing Notifications</string>
+    <string name="audio_routingnotification_recHeader">AudioRecord Routing Notifications</string>
+    <string name="audio_routingnotification_trackRoutingMsg">AudioTrack rerouting</string>
+    <string name="audio_routingnotification_recordRoutingMsg">AudioRecord rerouting</string>
+
+    <!-- Audio Loopback Latency Test -->
+    <string name="audio_loopback_test">Audio Loopback Latency Test</string>
+    <string name="audio_loopback_info">
+          This test requires the Loopback Plug. Please connect a Loopback Plug to the headset
+          connector, and proceed with the instructions on the screen.
+          The system will measure the input-output audio latency by injecting a pulse on the output
+          and computing the distance between replicas of the pulse.
+          You can vary the Audio Level slider to ensure the pulse feeds back at adequate levels.
+          Repeat until a confidence level >= 0.6 is achieved.
+    </string>
+    <string name="audio_loopback_instructions">
+          Please connect a "Loopback Plug" and press "Loopback Plug Ready".
+    </string>
+    <string name="audio_loopback_plug_ready_btn">Loopback Plug Ready</string>
+    <string name="audio_loopback_instructions2">
+          Set the audio level to a suitable value, then press the Test button.
+          It might require multiple tries until a confidence >= 0.6 is achieved.
+    </string>
+    <string name="audio_loopback_level_text">Audio Level</string>
+    <string name="audio_loopback_test_btn">Test</string>
+    <string name="audio_loopback_results_text">Results...</string>
+
+    <!-- Audio Frequency Line Test -->
+    <string name="audio_frequency_line_test">Audio Frequency Line Test</string>
+    <string name="audio_frequency_line_info">
+        The system will measure the frequency response of the left and right line outputs
+        by feeding them back through the microphone connection with the loopback jack.
+        This test requires the Loopback Plug. Please connect a Loopback Plug to the headset
+        connector, and proceed with the instructions on the screen.
+    </string>
+    <string name="audio_frequency_line_instructions">
+          Please connect a "Loopback Plug" and press "Loopback Plug Ready".
+    </string>
+    <string name="audio_frequency_line_plug_ready_btn">Loopback Plug Ready</string>
+
+    <string name="audio_frequency_line_test_btn">Test</string>
+    <string name="audio_frequency_line_results_text">Results...</string>
+
+    <!-- Audio Frequency Speaker Test -->
+    <string name="audio_frequency_speaker_test">Audio Frequency Speaker Test</string>
+    <string name="audio_frequency_speaker_info">
+        This test requires an external USB reference microphone. Please connect the USB microphone and proceed with the instructions on the screen.
+        The system will measure the frequency response of the left and right speakers (if there are two speakers), or twice the response of the mono speaker.
+    </string>
+    <string name="audio_frequency_speaker_instructions">
+          Please connect a USB reference microphone and press "USB Reference microphone ready"
+    </string>
+    <string name="audio_frequency_speaker_usb_status">Waiting for USB microphone...</string>
+    <string name="audio_frequency_speaker_mic_ready_btn">USB Reference microphone ready</string>
+    <string name="audio_frequency_speaker_mic_ready_text">USB Audio device detected\n\nPlease set up Device Under test
+    in quiet room, and Microphone 20 cms perpendicular to center of screen, then press TEST</string>
+    <string name="audio_frequency_speaker_mic_not_ready_text">"No USB Audio device detected. Please reconnect."</string>
+    <string name="audio_frequency_speaker_test_btn">Test</string>
+    <string name="audio_frequency_speaker_results_text">Results...</string>
+
+    <!-- Audio Frequency Microphone Test -->
+    <string name="audio_frequency_mic_test">Audio Frequency Microphone Test</string>
+    <string name="audio_frequency_mic_info">
+        This test requires an external USB reference microphone and external speakers.
+        Please use the headphone connector to connect external speakers. Position the device 40 cm
+        from the speakers and proceed with the instructions on the screen.
+        The system will measure the frequency response of the built-in microphone.
+    </string>
+    <string name="audio_frequency_mic_instructions">
+          Please connect external speakers using the headphone connector, and unplug any USB audio device.
+    </string>
+    <string name="audio_frequency_mic_speakers_ready_btn">External speakers ready</string>
+    <string name="audio_frequency_mic_speakers_ready_status">...</string>
+    <string name="audio_frequency_mic_instructions2">
+          Please position the speakers 40 cm from the device under test and press TEST 1
+    </string>
+    <string name="audio_frequency_mic_test1_btn">Test 1</string>
+    <string name="audio_frequency_mic_usb_status">Waiting for USB microphone...</string>
+    <string name="audio_frequency_mic_connect_mic">Please Connect USB microphone, position it next to
+    the built in microphone in the device and press USB reference microphone</string>
+    <string name="audio_frequency_mic_mic_ready_btn">USB Reference microphone ready</string>
+    <string name="audio_frequency_mic_mic_ready_text">USB Audio device detected\n\nPlease set up Device Under test
+    in quiet room, and Microphone 20 cms perpendicular to center of screen, then press TEST</string>
+    <string name="audio_frequency_mic_mic_not_ready_text">"No USB Audio device detected. Please reconnect."</string>
+    <string name="audio_frequency_mic_test2_btn">Test 2</string>
+    <string name="audio_frequency_mic_results_text">Results...</string>
+
 </resources>
diff --git a/apps/CtsVerifier/res/xml/device_admin_byod.xml b/apps/CtsVerifier/res/xml/device_admin_byod.xml
index 0408ce2..ce44794 100644
--- a/apps/CtsVerifier/res/xml/device_admin_byod.xml
+++ b/apps/CtsVerifier/res/xml/device_admin_byod.xml
@@ -19,6 +19,9 @@
     <uses-policies>
         <encrypted-storage />
         <wipe-data />
+        <reset-password />
+        <disable-keyguard-features />
+        <force-lock />
     </uses-policies>
 </device-admin>
 <!-- END_INCLUDE(meta_data) -->
diff --git a/apps/CtsVerifier/res/xml/mock_tv_input_service.xml b/apps/CtsVerifier/res/xml/mock_tv_input_service.xml
index 1a2cf86..d9cb867 100644
--- a/apps/CtsVerifier/res/xml/mock_tv_input_service.xml
+++ b/apps/CtsVerifier/res/xml/mock_tv_input_service.xml
@@ -15,5 +15,4 @@
 -->
 
 <tv-input xmlns:android="http://schemas.android.com/apk/res/android"
-    android:setupActivity="com.android.cts.verifier.tv.MockTvInputSetupActivity"
-    android:settingsActivity="com.android.cts.verifier.tv.MockTvInputSettingsActivity" />
+    android:setupActivity="com.android.cts.verifier.tv.MockTvInputSetupActivity" />
diff --git a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_median.xml b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_median.xml
new file mode 100644
index 0000000..9c6de77
--- /dev/null
+++ b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_median.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<config
+        linePaint.strokeWidth="3dp"
+        linePaint.color="#AA0000"
+        vertexPaint.color="#770000"
+        fillPaint.color="#00000000" />
diff --git a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_noise.xml b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_noise.xml
new file mode 100644
index 0000000..8fb236e
--- /dev/null
+++ b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_noise.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<config
+        linePaint.strokeWidth="2dp"
+        linePaint.color="#777777"
+        vertexPaint.color="777777"
+        fillPaint.color="#00000000" />
diff --git a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_pass.xml b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_pass.xml
new file mode 100644
index 0000000..9a6c29a
--- /dev/null
+++ b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_pass.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<config
+        linePaint.strokeWidth="2dp"
+        linePaint.color="#007700"
+        vertexPaint.color="#007700"
+        fillPaint.color="#00000000" />
diff --git a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_trials.xml b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_trials.xml
new file mode 100644
index 0000000..3f9ffc2
--- /dev/null
+++ b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_trials.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<config
+        linePaint.strokeWidth="1dp"
+        linePaint.color="#AAAAAA"
+        vertexPaint.color="#777777"
+        fillPaint.color="#00000000" />
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/AbstractTestListActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/AbstractTestListActivity.java
index 409b0db..3132219 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/AbstractTestListActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/AbstractTestListActivity.java
@@ -44,8 +44,13 @@
     }
 
     @Override
-    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+    protected final void onActivityResult(int requestCode, int resultCode, Intent data) {
         super.onActivityResult(requestCode, resultCode, data);
+        handleActivityResult(requestCode, resultCode, data);
+    }
+
+    /** Override this in subclasses instead of onActivityResult */
+    protected void handleActivityResult(int requestCode, int resultCode, Intent data) {
         switch (requestCode) {
             case LAUNCH_TEST_REQUEST_CODE:
                 handleLaunchTestResult(resultCode, data);
@@ -66,7 +71,7 @@
         setContentView(R.layout.list_content);
     }
 
-    private void handleLaunchTestResult(int resultCode, Intent data) {
+    protected void handleLaunchTestResult(int resultCode, Intent data) {
         if (resultCode == RESULT_OK) {
             TestResult testResult = TestResult.fromActivityResult(resultCode, data);
             mAdapter.setTestResult(testResult);
@@ -75,8 +80,13 @@
 
     /** Launch the activity when its {@link ListView} item is clicked. */
     @Override
-    protected void onListItemClick(ListView listView, View view, int position, long id) {
+    protected final void onListItemClick(ListView listView, View view, int position, long id) {
         super.onListItemClick(listView, view, position, id);
+        handleItemClick(listView, view, position, id);
+    }
+
+    /** Override this in subclasses instead of onListItemClick */
+    protected void handleItemClick(ListView listView, View view, int position, long id) {
         Intent intent = getIntent(position);
         startActivityForResult(intent, LAUNCH_TEST_REQUEST_CODE);
     }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/DialogTestListActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/DialogTestListActivity.java
new file mode 100644
index 0000000..789effa
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/DialogTestListActivity.java
@@ -0,0 +1,276 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier;
+
+import android.app.AlertDialog;
+import android.content.ActivityNotFoundException;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.database.DataSetObserver;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.Button;
+import android.widget.ImageView;
+import android.widget.ListView;
+import android.widget.TextView;
+import android.widget.Toast;
+
+import com.android.cts.verifier.R;
+
+/**
+ * Test list activity that supports showing dialogs with pass/fail buttons instead of
+ * starting new activities.
+ * In addition, the dialogs have a 'go' button that can be configured to launch an intent.
+ * Instructions are shown on top of the screen and a test preparation button is provided.
+ */
+public abstract class DialogTestListActivity extends PassFailButtons.TestListActivity {
+    private final int mLayoutId;
+    private final int mTitleStringId;
+    private final int mInfoStringId;
+    private final int mInstructionsStringId;
+
+    protected Button mPrepareTestButton;
+
+    protected int mCurrentTestPosition;
+
+    protected DialogTestListActivity(int layoutId, int titleStringId, int infoStringId,
+            int instructionsStringId) {
+        mLayoutId = layoutId;
+        mTitleStringId = titleStringId;
+        mInfoStringId = infoStringId;
+        mInstructionsStringId = instructionsStringId;
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+
+        setContentView(mLayoutId);
+        setInfoResources(mTitleStringId, mInfoStringId, -1);
+        setPassFailButtonClickListeners();
+        getPassButton().setEnabled(false);
+        setResult(RESULT_CANCELED);
+
+        ArrayTestListAdapter adapter = new ArrayTestListAdapter(this);
+
+        setupTests(adapter);
+
+        adapter.registerDataSetObserver(new DataSetObserver() {
+            @Override
+            public void onChanged() {
+                updatePassButton();
+            }
+        });
+
+        setTestListAdapter(adapter);
+
+        mCurrentTestPosition = 0;
+
+        TextView instructionTextView = (TextView)findViewById(R.id.test_instructions);
+        instructionTextView.setText(mInstructionsStringId);
+        mPrepareTestButton = (Button)findViewById(R.id.prepare_test_button);
+    }
+
+    /**
+     * Subclasses must add their test items to the provided adapter (usually instances of
+     * {@link DialogTestListItem} or {@link DialogTestListItemWithIcon} but any class deriving from
+     * {@link TestListAdapter.TestListItem} will do).
+     * @param adapter The adapter to add test items to.
+     */
+    protected abstract void setupTests(ArrayTestListAdapter adapter);
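+
+    // A minimal sketch (hypothetical, not part of this change) of a setupTests() override:
+    //
+    //   @Override
+    //   protected void setupTests(ArrayTestListAdapter adapter) {
+    //       adapter.add(new DialogTestListItem(this, R.string.example_test_name,
+    //               "com.example.example_test_id", R.string.example_test_instructions,
+    //               new Intent(Settings.ACTION_SETTINGS)));
+    //   }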
+
+    // Enable Pass Button when all tests passed.
+    private void updatePassButton() {
+        getPassButton().setEnabled(mAdapter.allTestsPassed());
+    }
+
+    public class DefaultTestCallback implements DialogTestListItem.TestCallback {
+        final private DialogTestListItem mTest;
+
+        public DefaultTestCallback(DialogTestListItem test) {
+            mTest = test;
+        }
+
+        @Override
+        public void onPass() {
+            clearRemainingState(mTest);
+            setTestResult(mTest, TestResult.TEST_RESULT_PASSED);
+        }
+
+        @Override
+        public void onFail() {
+            clearRemainingState(mTest);
+            setTestResult(mTest, TestResult.TEST_RESULT_FAILED);
+        }
+    }
+
+    public void showManualTestDialog(final DialogTestListItem test) {
+        showManualTestDialog(test, new DefaultTestCallback(test));
+    }
+
+    public void showManualTestDialog(final DialogTestListItem test,
+            final DialogTestListItem.TestCallback callback) {
+        AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(this)
+                .setIcon(android.R.drawable.ic_dialog_info)
+                .setTitle(mTitleStringId)
+                .setNeutralButton(R.string.go_button_text, null)
+                .setPositiveButton(R.string.pass_button_text, new AlertDialog.OnClickListener() {
+                    @Override
+                    public void onClick(DialogInterface dialog, int which) {
+                        callback.onPass();
+                    }
+                })
+                .setNegativeButton(R.string.fail_button_text, new AlertDialog.OnClickListener() {
+                    @Override
+                    public void onClick(DialogInterface dialog, int which) {
+                        callback.onFail();
+                    }
+                });
+        View customView = test.getCustomView();
+        if (customView != null) {
+            dialogBuilder.setView(customView);
+        } else {
+            dialogBuilder.setMessage(test.getManualTestInstruction());
+        }
+        final AlertDialog dialog = dialogBuilder.show();
+        // Note: setting the OnClickListener on the Dialog rather than the Builder prevents the
+        // dialog from being dismissed on onClick.
+        dialog.getButton(AlertDialog.BUTTON_NEUTRAL).setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                if (!startTestIntent(test)) {
+                    dialog.dismiss();
+                }
+            }
+        });
+    }
+
+    @Override
+    protected void handleItemClick(ListView l, View v, int position, long id) {
+        TestListAdapter.TestListItem test = (TestListAdapter.TestListItem) getListAdapter()
+                .getItem(position);
+        if (test instanceof DialogTestListItem) {
+            mCurrentTestPosition = position;
+            ((DialogTestListItem)test).performTest(this);
+        } else {
+            super.handleItemClick(l, v, position, id);
+        }
+    }
+
+
+    /**
+     * Start a test's manual intent.
+     * @param test The test whose manual intent is to be started.
+     * @return true if the activity could be started successfully, false otherwise.
+     */
+    boolean startTestIntent(final DialogTestListItem test) {
+        final Intent intent = test.intent;
+        try {
+            startActivity(intent);
+        } catch (ActivityNotFoundException e) {
+            Toast.makeText(this, "Cannot start " + intent, Toast.LENGTH_LONG).show();
+            setTestResult(test, TestResult.TEST_RESULT_FAILED);
+            return false;
+        }
+        return true;
+    }
+
+    protected void clearRemainingState(final DialogTestListItem test) {
+        // do nothing, override in subclass if needed
+    }
+
+    protected void setTestResult(DialogTestListItem test, int result) {
+        // Bundle result in an intent to feed into handleLaunchTestResult
+        Intent resultIntent = new Intent();
+        TestResult.addResultData(resultIntent, result, test.testName, /* testDetails */ null,
+                /* reportLog */ null);
+        handleLaunchTestResult(RESULT_OK, resultIntent);
+        getListView().smoothScrollToPosition(mCurrentTestPosition + 1);
+    }
+
+    protected void showToast(int messageId) {
+        String message = getString(messageId);
+        Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
+    }
+
+    protected static class DialogTestListItem extends TestListAdapter.TestListItem {
+
+        public interface TestCallback {
+            void onPass();
+            void onFail();
+        }
+
+        private String mManualInstruction;
+
+        public DialogTestListItem(Context context, int nameResId, String testId) {
+            super(context.getString(nameResId), testId, null, null, null, null);
+        }
+
+        public DialogTestListItem(Context context, int nameResId, String testId,
+                int testInstructionResId, Intent testIntent) {
+            super(context.getString(nameResId), testId, testIntent, null, null, null);
+            mManualInstruction = context.getString(testInstructionResId);
+        }
+
+        public void performTest(DialogTestListActivity activity) {
+            activity.showManualTestDialog(this);
+        }
+
+        public String getManualTestInstruction() {
+            return mManualInstruction;
+        }
+
+        public Intent getManualTestIntent() {
+            return intent;
+        }
+
+        public View getCustomView() {
+            return null;
+        }
+
+        @Override
+        boolean isTest() {
+            return true;
+        }
+    }
+
+    protected static class DialogTestListItemWithIcon extends DialogTestListItem {
+
+        private final int mImageResId;
+        private final Context mContext;
+
+        public DialogTestListItemWithIcon(Context context, int nameResId, String testId,
+                int testInstructionResId, Intent testIntent, int imageResId) {
+            super(context, nameResId, testId, testInstructionResId, testIntent);
+            mContext = context;
+            mImageResId = imageResId;
+        }
+
+        @Override
+        public View getCustomView() {
+            LayoutInflater layoutInflater = LayoutInflater.from(mContext);
+            View view = layoutInflater.inflate(R.layout.dialog_custom_view,
+                    null /* root */);
+            ((ImageView) view.findViewById(R.id.sample_icon)).setImageResource(mImageResId);
+            ((TextView) view.findViewById(R.id.message)).setText(getManualTestInstruction());
+            return view;
+        }
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java
index 8cfc6df..976ff32 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java
@@ -16,8 +16,10 @@
 
 package com.android.cts.verifier;
 
+import android.Manifest;
 import android.app.ListActivity;
 import android.content.Intent;
+import android.content.pm.PackageManager;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Menu;
@@ -32,6 +34,20 @@
 /** Top-level {@link ListActivity} for launching tests and managing results. */
 public class TestListActivity extends AbstractTestListActivity implements View.OnClickListener {
 
+    private static final String [] RUNTIME_PERMISSIONS = {
+        Manifest.permission.ACCESS_FINE_LOCATION,
+        Manifest.permission.BODY_SENSORS,
+        Manifest.permission.READ_EXTERNAL_STORAGE,
+        Manifest.permission.READ_PHONE_STATE,
+        Manifest.permission.CALL_PHONE,
+        Manifest.permission.WRITE_CONTACTS,
+        Manifest.permission.CAMERA,
+        Manifest.permission.WRITE_EXTERNAL_STORAGE,
+        Manifest.permission.RECORD_AUDIO,
+        Manifest.permission.READ_CONTACTS
+    };
+    private static final int CTS_VERIFIER_PERMISSION_REQUEST = 1;
+
     private static final String TAG = TestListActivity.class.getSimpleName();
 
     @Override
@@ -43,6 +59,18 @@
     protected void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
 
+        for (String runtimePermission : RUNTIME_PERMISSIONS) {
+            Log.v(TAG, "Checking permissions for: " + runtimePermission);
+            if (checkSelfPermission(runtimePermission) != PackageManager.PERMISSION_GRANTED) {
+                requestPermissions(RUNTIME_PERMISSIONS, CTS_VERIFIER_PERMISSION_REQUEST);
+                return;
+            }
+
+        }
+        createContinue();
+    }
+
+    private void createContinue() {
         if (!isTaskRoot()) {
             finish();
         }
@@ -63,6 +91,19 @@
     }
 
     @Override
+    public void onRequestPermissionsResult(
+            int requestCode, String permissions[], int[] grantResults) {
+        if (requestCode == CTS_VERIFIER_PERMISSION_REQUEST) {
+            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
+                createContinue();
+                return;
+            }
+            Log.v(TAG, "Permission not granted.");
+            Toast.makeText(this, R.string.runtime_permissions_error, Toast.LENGTH_SHORT).show();
+        }
+    }
+
+    @Override
     public boolean onCreateOptionsMenu(Menu menu) {
         MenuInflater inflater = getMenuInflater();
         inflater.inflate(R.menu.test_list_menu, menu);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestListAdapter.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestListAdapter.java
index 2160902..ce092cc 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestListAdapter.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestListAdapter.java
@@ -141,7 +141,7 @@
             return new TestListItem(title, null, null, null, null, null);
         }
 
-        private TestListItem(String title, String testName, Intent intent,
+        protected TestListItem(String title, String testName, Intent intent,
                 String[] requiredFeatures, String[] excludedFeatures, String[] applicableFeatures) {
             this.title = title;
             this.testName = testName;
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestResult.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestResult.java
index d8a675c..c5d2d52 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestResult.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestResult.java
@@ -71,13 +71,18 @@
     private static Intent createResult(Activity activity, int testResult, String testName,
             String testDetails, ReportLog reportLog) {
         Intent data = new Intent(activity, activity.getClass());
-        data.putExtra(TEST_NAME, testName);
-        data.putExtra(TEST_RESULT, testResult);
-        data.putExtra(TEST_DETAILS, testDetails);
-        data.putExtra(TEST_METRICS, reportLog);
+        addResultData(data, testResult, testName, testDetails, reportLog);
         return data;
     }
 
+    public static void addResultData(Intent intent, int testResult, String testName,
+            String testDetails, ReportLog reportLog) {
+        intent.putExtra(TEST_NAME, testName);
+        intent.putExtra(TEST_RESULT, testResult);
+        intent.putExtra(TEST_DETAILS, testDetails);
+        intent.putExtra(TEST_METRICS, reportLog);
+    }
+
     /**
      * Convert the test activity's result into a {@link TestResult}. Only meant to be used by
      * {@link TestListActivity}.
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsReport.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsReport.java
index dc2502c..05c5e77 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsReport.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsReport.java
@@ -96,6 +96,7 @@
         xml.startTag(null, VERIFIER_INFO_TAG);
         xml.attribute(null, "version-name", Version.getVersionName(mContext));
         xml.attribute(null, "version-code", Integer.toString(Version.getVersionCode(mContext)));
+        xml.attribute(null, "build", Version.getBuildNumber(mContext));
         xml.endTag(null, VERIFIER_INFO_TAG);
 
         xml.startTag(null, DEVICE_INFO_TAG);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/Version.java b/apps/CtsVerifier/src/com/android/cts/verifier/Version.java
index e7b6121..dfe9508 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/Version.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/Version.java
@@ -24,13 +24,21 @@
 class Version {
 
     static String getVersionName(Context context) {
-        return getPackageInfo(context).versionName;
+        return getVersionNameStrings(context)[0];
     }
 
     static int getVersionCode(Context context) {
         return getPackageInfo(context).versionCode;
     }
 
+    static String getBuildNumber(Context context) {
+        return getVersionNameStrings(context)[1];
+    }
+
+    static private String[] getVersionNameStrings(Context context) {
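+        // Assumes the versionName is of the form "<version-name> <build-number>"
+        // (space-separated), so index 0 is the version name and index 1 the build number.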
+        return getPackageInfo(context).versionName.split(" ");
+    }
+
     static PackageInfo getPackageInfo(Context context) {
         try {
             PackageManager packageManager = context.getPackageManager();
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioBandSpecs.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioBandSpecs.java
new file mode 100644
index 0000000..9af4af1
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioBandSpecs.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+public class AudioBandSpecs {
+    double mFreqStart;
+    double mFreqStop;
+    double mRippleStartTop;
+    double mRippleStartBottom;
+
+    double mRippleStopTop;
+    double mRippleStopBottom;
+
+    double mOffset;
+
+    public AudioBandSpecs(double fStart, double fStop, double startTop, double startBottom,
+            double stopTop, double stopBottom) {
+        initFreq(fStart, fStop);
+        initRipple(startTop, startBottom, stopTop, stopBottom);
+        setOffset(0);
+    }
+
+    public void initRipple(double startTop, double startBottom, double stopTop, double stopBottom) {
+        mRippleStartTop = startTop;
+        mRippleStartBottom = startBottom;
+        mRippleStopTop = stopTop;
+        mRippleStopBottom = stopBottom;
+        // note: top should be >= bottom, but no check is done here.
+    }
+
+    public void initFreq(double fStart, double fStop) {
+        mFreqStart = fStart;
+        mFreqStop = fStop;
+    }
+
+    public void setOffset(double offset) {
+        mOffset = offset;
+    }
+
+    /**
+     * Check if the given point is in bounds in this band.
+     */
+    public boolean isInBounds(double freq, double value) {
+        if (freq < mFreqStart || freq > mFreqStop) {
+            return false;
+        }
+
+        double d = mFreqStop - mFreqStart;
+        if (d <= 0) {
+            return false;
+        }
+
+        double e = freq - mFreqStart;
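+        // Linearly interpolate the allowed top and bottom ripple limits between the band edges
+        // and shift both by mOffset; the point is in bounds if its value lies between them.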
+        double vTop = (e / d) * (mRippleStopTop - mRippleStartTop) + mRippleStartTop + mOffset;
+        if (value > vTop) {
+            return false;
+        }
+
+        double vBottom = (e / d) * (mRippleStopBottom - mRippleStartBottom) + mRippleStartBottom
+                + mOffset;
+
+        if (value < vBottom) {
+            return false;
+        }
+        return true;
+    }
+
+    public String toString() {
+        StringBuilder sb = new StringBuilder();
+        sb.append(String.format("Freq %.1f - %.1f |", mFreqStart, mFreqStop));
+        sb.append(String.format("start [%.1f : %.1f] |", mRippleStartTop, mRippleStartBottom));
+        sb.append(String.format("stop  [%.1f : %.1f] |", mRippleStopTop, mRippleStopBottom));
+        sb.append(String.format("offset %.1f", mOffset));
+        return sb.toString();
+    }
+}
\ No newline at end of file
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioDeviceNotificationsActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioDeviceNotificationsActivity.java
new file mode 100644
index 0000000..93e0507
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioDeviceNotificationsActivity.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+
+import android.content.Context;
+
+import android.media.AudioDeviceCallback;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+
+import android.os.Bundle;
+
+import android.view.View;
+import android.view.View.OnClickListener;
+
+import android.widget.Button;
+import android.widget.TextView;
+
+/**
+ * Tests Audio Device Connection events by prompting the user to insert/remove a wired headset
+ * and noting the presence (or absence) of notifications.
+ */
+public class AudioDeviceNotificationsActivity extends PassFailButtons.Activity {
+    Context mContext;
+
+    TextView mConnectView;
+    TextView mDisconnectView;
+    Button mClearMsgsBtn;
+
+    private class TestAudioDeviceCallback extends AudioDeviceCallback {
+        public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {
+            if (addedDevices.length != 0) {
+                mConnectView.setText(
+                    mContext.getResources().getString(R.string.audio_dev_notification_connectMsg));
+            }
+        }
+
+        public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {
+            if (removedDevices.length != 0) {
+                mDisconnectView.setText(
+                    mContext.getResources().getString(
+                        R.string.audio_dev_notification_disconnectMsg));
+            }
+        }
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.audio_dev_notify);
+
+        mContext = this;
+
+        mConnectView = (TextView)findViewById(R.id.audio_dev_notification_connect_msg);
+        mDisconnectView = (TextView)findViewById(R.id.audio_dev_notification_disconnect_msg);
+
+        mClearMsgsBtn = (Button)findViewById(R.id.audio_dev_notification_connect_clearmsgs_btn);
+        mClearMsgsBtn.setOnClickListener(new View.OnClickListener() {
+            public void onClick(View v) {
+                mConnectView.setText("");
+                mDisconnectView.setText("");
+            }
+        });
+
+        AudioManager audioManager = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
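+        // Register for device connect/disconnect events; passing a null Handler delivers the
+        // callbacks on the main thread.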
+        audioManager.registerAudioDeviceCallback(new TestAudioDeviceCallback(), null);
+
+        setPassFailButtonClickListeners();
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioFrequencyLineActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioFrequencyLineActivity.java
new file mode 100644
index 0000000..d3e2571
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioFrequencyLineActivity.java
@@ -0,0 +1,664 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+import com.android.cts.verifier.audio.wavelib.*;
+import com.android.compatibility.common.util.ReportLog;
+import com.android.compatibility.common.util.ResultType;
+import com.android.compatibility.common.util.ResultUnit;
+import android.content.Context;
+
+import android.media.AudioDeviceCallback;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.media.AudioRecord;
+import android.media.MediaRecorder;
+
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Message;
+import android.os.SystemClock;
+
+import android.util.Log;
+
+import android.view.View;
+import android.view.View.OnClickListener;
+
+import android.widget.Button;
+import android.widget.TextView;
+import android.widget.SeekBar;
+import android.widget.LinearLayout;
+import android.widget.ProgressBar;
+
+/**
+ * Tests the frequency response of the device's line output by using a loopback plug.
+ */
+public class AudioFrequencyLineActivity extends PassFailButtons.Activity implements Runnable,
+    AudioRecord.OnRecordPositionUpdateListener {
+    private static final String TAG = "AudioFrequencyLineActivity";
+
+    static final int TEST_STARTED = 900;
+    static final int TEST_ENDED = 901;
+    static final int TEST_MESSAGE = 902;
+    static final double MIN_ENERGY_BAND_1 = -20.0;
+    static final double MIN_FRACTION_POINTS_IN_BAND = 0.3;
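+    // Pass criteria: the average level in band 1 (500 Hz - 4 kHz) must be at least
+    // MIN_ENERGY_BAND_1 dB, and in every band more than MIN_FRACTION_POINTS_IN_BAND of the
+    // measured points must fall within that band's ripple bounds.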
+
+    OnBtnClickListener mBtnClickListener = new OnBtnClickListener();
+    Context mContext;
+
+    Button mLoopbackPlugReady;
+    LinearLayout mLinearLayout;
+    Button mTestButton;
+    TextView mResultText;
+    ProgressBar mProgressBar;
+    //recording
+    private boolean mIsRecording = false;
+    private final Object mRecordingLock = new Object();
+    private AudioRecord mRecorder;
+    private int mMinRecordBufferSizeInSamples = 0;
+    private short[] mAudioShortArray;
+    private short[] mAudioShortArray2;
+
+    private final int mBlockSizeSamples = 1024;
+    private final int mSamplingRate = 48000;
+    private final int mSelectedRecordSource = MediaRecorder.AudioSource.VOICE_RECOGNITION;
+    private final int mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
+    private final int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
+    private volatile Thread mRecordThread;
+    private boolean mRecordThreadShutdown = false;
+
+    PipeShort mPipe = new PipeShort(65536);
+    SoundPlayerObject mSPlayer;
+
+    private DspBufferComplex mC;
+    private DspBufferDouble mData;
+
+    private DspWindow mWindow;
+    private DspFftServer mFftServer;
+    private VectorAverage mFreqAverageMain = new VectorAverage();
+
+    private VectorAverage mFreqAverage0 = new VectorAverage();
+    private VectorAverage mFreqAverage1 = new VectorAverage();
+
+    private int mCurrentTest = -1;
+    int mBands = 4;
+    AudioBandSpecs[] bandSpecsArray = new AudioBandSpecs[mBands];
+
+    int mMaxLevel;
+    private class OnBtnClickListener implements OnClickListener {
+        @Override
+        public void onClick(View v) {
+            switch (v.getId()) {
+                case R.id.audio_frequency_line_plug_ready_btn:
+                    Log.i(TAG, "audio loopback plug ready");
+                    //enable all the other views.
+                    enableLayout(true);
+                    break;
+                case R.id.audio_frequency_line_test_btn:
+                    Log.i(TAG, "audio loopback test");
+                    startAudioTest();
+                    break;
+            }
+        }
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.audio_frequency_line_activity);
+
+        mContext = this;
+
+        mLoopbackPlugReady = (Button)findViewById(R.id.audio_frequency_line_plug_ready_btn);
+        mLoopbackPlugReady.setOnClickListener(mBtnClickListener);
+        mLinearLayout = (LinearLayout)findViewById(R.id.audio_frequency_line_layout);
+        mTestButton = (Button)findViewById(R.id.audio_frequency_line_test_btn);
+        mTestButton.setOnClickListener(mBtnClickListener);
+        mResultText = (TextView)findViewById(R.id.audio_frequency_line_results_text);
+        mProgressBar = (ProgressBar)findViewById(R.id.audio_frequency_line_progress_bar);
+        showWait(false);
+        enableLayout(false);         // disable all content
+
+        mSPlayer = new SoundPlayerObject();
+        mSPlayer.setSoundWithResId(getApplicationContext(), R.raw.stereo_mono_white_noise_48);
+        mSPlayer.setBalance(0.5f);
+
+        //Init FFT stuff
+        mAudioShortArray2 = new short[mBlockSizeSamples*2];
+        mData = new DspBufferDouble(mBlockSizeSamples);
+        mC = new DspBufferComplex(mBlockSizeSamples);
+        mFftServer = new DspFftServer(mBlockSizeSamples);
+
+        int overlap = mBlockSizeSamples / 2;
+
+        mWindow = new DspWindow(DspWindow.WINDOW_HANNING, mBlockSizeSamples, overlap);
+
+        setPassFailButtonClickListeners();
+        getPassButton().setEnabled(false);
+        setInfoResources(R.string.audio_frequency_line_test,
+                R.string.audio_frequency_line_info, -1);
+
+        //Init bands
+        bandSpecsArray[0] = new AudioBandSpecs(
+                50, 500,        /* frequency start,stop */
+                -20.0, -50,     /* start top,bottom value */
+                4.0, -4.0       /* stop top,bottom value */);
+
+        bandSpecsArray[1] = new AudioBandSpecs(
+                500,4000,       /* frequency start,stop */
+                4.0, -4.0,      /* start top,bottom value */
+                4.0, -4.0        /* stop top,bottom value */);
+
+        bandSpecsArray[2] = new AudioBandSpecs(
+                4000, 12000,    /* frequency start,stop */
+                4.0, -4.0,      /* start top,bottom value */
+                5.0, -5.0       /* stop top,bottom value */);
+
+        bandSpecsArray[3] = new AudioBandSpecs(
+                12000, 20000,   /* frequency start,stop */
+                5.0, -5.0,      /* start top,bottom value */
+                5.0, -30.0      /* stop top,bottom value */);
+    }
+
+    /**
+     * enable test ui elements
+     */
+    private void enableLayout(boolean enable) {
+        for (int i = 0; i < mLinearLayout.getChildCount(); i++) {
+            View view = mLinearLayout.getChildAt(i);
+            view.setEnabled(enable);
+        }
+    }
+
+    /**
+     * show active progress bar
+     */
+    private void showWait(boolean show) {
+        if (show) {
+            mProgressBar.setVisibility(View.VISIBLE);
+        } else {
+            mProgressBar.setVisibility(View.INVISIBLE);
+        }
+    }
+
+    private void setMaxLevel() {
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        mMaxLevel = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
+        am.setStreamVolume(AudioManager.STREAM_MUSIC, (int)(mMaxLevel), 0);
+    }
+
+    /**
+     *  Start the loopback audio test
+     */
+    private void startAudioTest() {
+        if (mTestThread != null && !mTestThread.isAlive()) {
+            mTestThread = null; //kill it.
+        }
+
+        if (mTestThread == null) {
+            Log.v(TAG,"Executing test Thread");
+            mTestThread = new Thread(mPlayRunnable);
+            getPassButton().setEnabled(false);
+            if (!mSPlayer.isAlive())
+                mSPlayer.start();
+            mTestThread.start();
+        } else {
+            Log.v(TAG,"test Thread already running.");
+        }
+    }
+
+    Thread mTestThread;
+    Runnable mPlayRunnable = new Runnable() {
+        public void run() {
+            Message msg = Message.obtain();
+            msg.what = TEST_STARTED;
+            mMessageHandler.sendMessage(msg);
+            setMaxLevel();
+
+            sendMessage("Testing Left Capture");
+            mCurrentTest = 0;
+            mFreqAverage0.reset();
+            mSPlayer.setBalance(0.0f);
+            play();
+
+            sendMessage("Testing Right Capture");
+            mCurrentTest = 1;
+            mFreqAverage1.reset();
+            mSPlayer.setBalance(1.0f);
+            play();
+
+            mCurrentTest = -1;
+            sendMessage("Testing Completed");
+
+            Message msg2 = Message.obtain();
+            msg2.what = TEST_ENDED;
+            mMessageHandler.sendMessage(msg2);
+        }
+
+        private void play() {
+            startRecording();
+            mSPlayer.play(true);
+
+            try {
+                Thread.sleep(2000);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+
+            mSPlayer.play(false);
+            stopRecording();
+        }
+
+        private void sendMessage(String str) {
+            Message msg = Message.obtain();
+            msg.what = TEST_MESSAGE;
+            msg.obj = str;
+            mMessageHandler.sendMessage(msg);
+        }
+    };
+
+    private Handler mMessageHandler = new Handler() {
+        public void handleMessage(Message msg) {
+            super.handleMessage(msg);
+            switch (msg.what) {
+            case TEST_STARTED:
+                showWait(true);
+                getPassButton().setEnabled(false);
+                break;
+            case TEST_ENDED:
+                showWait(false);
+                computeResults();
+                break;
+            case TEST_MESSAGE:
+                String str = (String)msg.obj;
+                if (str != null) {
+                    mResultText.setText(str);
+                }
+                break;
+            default:
+                Log.e(TAG, String.format("Unknown message: %d", msg.what));
+            }
+        }
+    };
+
+    private class Results {
+        private String mLabel;
+        public double[] mValuesLog;
+        int[] mPointsPerBand = new int[mBands];
+        double[] mAverageEnergyPerBand = new double[mBands];
+        int[] mInBoundPointsPerBand = new int[mBands];
+        public Results(String label) {
+            mLabel = label;
+        }
+
+        //append results
+        public String toString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append(String.format("Channel %s\n", mLabel));
+            sb.append("Level in Band 1 : " + (testLevel() ? "OK" :"FAILED") +"\n");
+            for (int b = 0; b < mBands; b++) {
+                double percent = 0;
+                if (mPointsPerBand[b] > 0) {
+                    percent = 100.0 * (double)mInBoundPointsPerBand[b] / mPointsPerBand[b];
+                }
+                sb.append(String.format(
+                        " Band %d: Av. Level: %.1f dB InBand: %d/%d (%.1f%%) %s\n",
+                        b, mAverageEnergyPerBand[b],
+                        mInBoundPointsPerBand[b],
+                        mPointsPerBand[b],
+                        percent,
+                        (testInBand(b) ? "OK" : "FAILED")));
+            }
+            return sb.toString();
+        }
+
+        public boolean testLevel() {
+            if (mAverageEnergyPerBand[1] >= MIN_ENERGY_BAND_1) {
+                return true;
+            }
+            return false;
+        }
+
+        public boolean testInBand(int b) {
+            if (b >= 0 && b < mBands && mPointsPerBand[b] > 0) {
+                if ((double)mInBoundPointsPerBand[b] / mPointsPerBand[b] >
+                MIN_FRACTION_POINTS_IN_BAND)
+                    return true;
+            }
+            return false;
+        }
+
+        public boolean testAll() {
+            if (!testLevel()) {
+                return false;
+            }
+            for (int b = 0; b < mBands; b++) {
+                if (!testInBand(b)) {
+                    return false;
+                }
+            }
+            return true;
+        }
+    }
+
+    /**
+     * compute test results
+     */
+    private void computeResults() {
+        Results resultsLeft = new Results("Left");
+        computeResultsForVector(mFreqAverage0, resultsLeft);
+        Results resultsRight = new Results("Right");
+        computeResultsForVector(mFreqAverage1, resultsRight);
+        if (resultsLeft.testAll() && resultsRight.testAll()) {
+            //enable button
+            getPassButton().setEnabled(true);
+        }
+    }
+
+    private void computeResultsForVector(VectorAverage freqAverage, Results results) {
+
+        int points = freqAverage.getSize();
+        if (points > 0) {
+            //compute vector in db
+            double[] values = new double[points];
+            freqAverage.getData(values, false);
+            results.mValuesLog = new double[points];
+            for (int i = 0; i < points; i++) {
+                results.mValuesLog[i] = 20 * Math.log10(values[i]);
+            }
+
+            int currentBand = 0;
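+            // FFT bin i corresponds to frequency i * Fs / N (Fs = 48000 Hz, N = 1024 samples),
+            // i.e. roughly 46.9 Hz per bin.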
+            for (int i = 0; i < points; i++) {
+                double freq = (double)mSamplingRate * i / (double)mBlockSizeSamples;
+                if (freq > bandSpecsArray[currentBand].mFreqStop) {
+                    currentBand++;
+                    if (currentBand >= mBands)
+                        break;
+                }
+
+                if (freq >= bandSpecsArray[currentBand].mFreqStart) {
+                    results.mAverageEnergyPerBand[currentBand] += results.mValuesLog[i];
+                    results.mPointsPerBand[currentBand]++;
+                }
+            }
+
+            for (int b = 0; b < mBands; b++) {
+                if (results.mPointsPerBand[b] > 0) {
+                    results.mAverageEnergyPerBand[b] =
+                            results.mAverageEnergyPerBand[b] / results.mPointsPerBand[b];
+                }
+            }
+
+            //set offset relative to band 1 level
+            for (int b = 0; b < mBands; b++) {
+                bandSpecsArray[b].setOffset(results.mAverageEnergyPerBand[1]);
+            }
+
+            //test points in band.
+            currentBand = 0;
+            for (int i = 0; i < points; i++) {
+                double freq = (double)mSamplingRate * i / (double)mBlockSizeSamples;
+                if (freq >  bandSpecsArray[currentBand].mFreqStop) {
+                    currentBand++;
+                    if (currentBand >= mBands)
+                        break;
+                }
+
+                if (freq >= bandSpecsArray[currentBand].mFreqStart) {
+                    double value = results.mValuesLog[i];
+                    if (bandSpecsArray[currentBand].isInBounds(freq, value)) {
+                        results.mInBoundPointsPerBand[currentBand]++;
+                    }
+                }
+            }
+
+            appendResultsToScreen(results.toString());
+            //store results
+            recordTestResults(results);
+        } else {
+            appendResultsToScreen("Failed testing channel " + results.mLabel);
+        }
+    }
+
+    //append results
+    private void appendResultsToScreen(String str) {
+        String currentText = mResultText.getText().toString();
+        mResultText.setText(currentText + "\n" + str);
+    }
+
+    /**
+     * Store test results in log
+     */
+    private void recordTestResults(Results results) {
+        String channelLabel = "channel_" + results.mLabel;
+
+        for (int b = 0; b < mBands; b++) {
+            String bandLabel = String.format(channelLabel + "_%d", b);
+            getReportLog().addValue(
+                    bandLabel + "_Level",
+                    results.mAverageEnergyPerBand[b],
+                    ResultType.HIGHER_BETTER,
+                    ResultUnit.NONE);
+
+            getReportLog().addValue(
+                    bandLabel + "_pointsinbound",
+                    results.mInBoundPointsPerBand[b],
+                    ResultType.HIGHER_BETTER,
+                    ResultUnit.COUNT);
+
+            getReportLog().addValue(
+                    bandLabel + "_pointstotal",
+                    results.mPointsPerBand[b],
+                    ResultType.NEUTRAL,
+                    ResultUnit.COUNT);
+        }
+
+        getReportLog().addValues(channelLabel + "_magnitudeSpectrumLog",
+                results.mValuesLog,
+                ResultType.NEUTRAL,
+                ResultUnit.NONE);
+
+        Log.v(TAG, "Results Recorded");
+    }
+
+    private void startRecording() {
+        synchronized (mRecordingLock) {
+            mIsRecording = true;
+        }
+
+        boolean successful = initRecord();
+        if (successful) {
+            startRecordingForReal();
+        } else {
+            Log.v(TAG, "Recorder initialization error.");
+            synchronized (mRecordingLock) {
+                mIsRecording = false;
+            }
+        }
+    }
+
+    private void startRecordingForReal() {
+        // start streaming
+        if (mRecordThread == null) {
+            mRecordThread = new Thread(AudioFrequencyLineActivity.this);
+            mRecordThread.setName("FrequencyAnalyzerThread");
+            mRecordThreadShutdown = false;
+        }
+        if (!mRecordThread.isAlive()) {
+            mRecordThread.start();
+        }
+
+        mPipe.flush();
+
+        long startTime = SystemClock.uptimeMillis();
+        mRecorder.startRecording();
+        if (mRecorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+            stopRecording();
+            return;
+        }
+        Log.v(TAG, "Start time: " + (long) (SystemClock.uptimeMillis() - startTime) + " ms");
+    }
+
+    private void stopRecording() {
+        synchronized (mRecordingLock) {
+            stopRecordingForReal();
+            mIsRecording = false;
+        }
+    }
+
+    private void stopRecordingForReal() {
+
+        // stop streaming
+        Thread zeThread = mRecordThread;
+        mRecordThread = null;
+        mRecordThreadShutdown = true;
+        if (zeThread != null) {
+            zeThread.interrupt();
+            try {
+                zeThread.join();
+            } catch(InterruptedException e) {
+                Log.v(TAG,"Error shutting down recording thread " + e);
+                //we don't really care about this error, just logging it.
+            }
+        }
+         // release recording resources
+        if (mRecorder != null) {
+            mRecorder.stop();
+            mRecorder.release();
+            mRecorder = null;
+        }
+    }
+
+    private boolean initRecord() {
+        int minRecordBuffSizeInBytes = AudioRecord.getMinBufferSize(mSamplingRate,
+                mChannelConfig, mAudioFormat);
+        Log.v(TAG,"FrequencyAnalyzer: min buff size = " + minRecordBuffSizeInBytes + " bytes");
+        if (minRecordBuffSizeInBytes <= 0) {
+            return false;
+        }
+
+        mMinRecordBufferSizeInSamples = minRecordBuffSizeInBytes / 2;
+        // allocate the byte array to read the audio data
+
+        mAudioShortArray = new short[mMinRecordBufferSizeInSamples];
+
+        Log.v(TAG, "Initiating record:");
+        Log.v(TAG, "      using source " + mSelectedRecordSource);
+        Log.v(TAG, "      at " + mSamplingRate + "Hz");
+
+        try {
+            mRecorder = new AudioRecord(mSelectedRecordSource, mSamplingRate,
+                    mChannelConfig, mAudioFormat, 2 * minRecordBuffSizeInBytes);
+        } catch (IllegalArgumentException e) {
+            return false;
+        }
+        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
+            mRecorder.release();
+            mRecorder = null;
+            return false;
+        }
+        mRecorder.setRecordPositionUpdateListener(this);
+        mRecorder.setPositionNotificationPeriod(mBlockSizeSamples / 2);
+        return true;
+    }
+
+    // ---------------------------------------------------------
+    // Implementation of AudioRecord.OnRecordPositionUpdateListener
+    // --------------------
+    public void onPeriodicNotification(AudioRecord recorder) {
+        int samplesAvailable = mPipe.availableToRead();
+        int samplesNeeded = mBlockSizeSamples;
+        if (samplesAvailable >= samplesNeeded) {
+            mPipe.read(mAudioShortArray2, 0, samplesNeeded);
+
+            //compute peak level, clipping count and energy for this block.
+            double maxval = Math.pow(2, 15);
+            int clipcount = 0;
+            double cliplevel = (maxval-10) / maxval;
+            double sum = 0;
+            double maxabs = 0;
+            int i;
+            int index = 0;
+
+            for (i = 0; i < samplesNeeded; i++) {
+                double value = mAudioShortArray2[i] / maxval;
+                double valueabs = Math.abs(value);
+
+                if (valueabs > maxabs) {
+                    maxabs = valueabs;
+                }
+
+                if (valueabs > cliplevel) {
+                    clipcount++;
+                }
+
+                sum += value * value;
+                //fft stuff
+                if (index < mBlockSizeSamples) {
+                    mData.mData[index] = value;
+                }
+                index++;
+            }
+
+            //for the current frame, compute FFT and send to the viewer.
+
+            //apply window and pack as complex for now.
+            DspBufferMath.mult(mData, mData, mWindow.mBuffer);
+            DspBufferMath.set(mC, mData);
+            mFftServer.fft(mC, 1);
+
+            double[] halfMagnitude = new double[mBlockSizeSamples / 2];
+            for (i = 0; i < mBlockSizeSamples / 2; i++) {
+                halfMagnitude[i] = Math.sqrt(mC.mReal[i] * mC.mReal[i] + mC.mImag[i] * mC.mImag[i]);
+            }
+
+            mFreqAverageMain.setData(halfMagnitude, false); //average all of them!
+
+            switch(mCurrentTest) {
+                case 0:
+                    mFreqAverage0.setData(halfMagnitude, false);
+                    break;
+                case 1:
+                    mFreqAverage1.setData(halfMagnitude, false);
+                    break;
+            }
+        }
+    }
+
+    public void onMarkerReached(AudioRecord track) {
+    }
+
+    // ---------------------------------------------------------
+    // Implementation of Runnable for the audio recording thread
+    // --------------------
+    public void run() {
+        int nSamplesRead = 0;
+
+        Thread thisThread = Thread.currentThread();
+        while (mRecordThread == thisThread && !mRecordThreadShutdown) {
+            // read from native recorder
+            nSamplesRead = mRecorder.read(mAudioShortArray, 0, mMinRecordBufferSizeInSamples);
+            if (nSamplesRead > 0) {
+                mPipe.write(mAudioShortArray, 0, nSamplesRead);
+            }
+        }
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioFrequencyMicActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioFrequencyMicActivity.java
new file mode 100644
index 0000000..b37a721
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioFrequencyMicActivity.java
@@ -0,0 +1,851 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+import com.android.cts.verifier.audio.wavelib.*;
+import com.android.compatibility.common.util.ReportLog;
+import com.android.compatibility.common.util.ResultType;
+import com.android.compatibility.common.util.ResultUnit;
+import android.content.Context;
+import android.content.BroadcastReceiver;
+import android.content.Intent;
+import android.content.IntentFilter;
+
+import android.media.AudioDeviceCallback;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.media.AudioRecord;
+import android.media.MediaRecorder;
+
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Message;
+import android.os.SystemClock;
+
+import android.util.Log;
+
+import android.view.View;
+import android.view.View.OnClickListener;
+
+import android.widget.Button;
+import android.widget.TextView;
+import android.widget.SeekBar;
+import android.widget.LinearLayout;
+import android.widget.ProgressBar;
+
+/**
+ * Tests the built-in microphone frequency response using external speakers and a USB
+ * reference microphone.
+ */
+public class AudioFrequencyMicActivity extends PassFailButtons.Activity implements Runnable,
+    AudioRecord.OnRecordPositionUpdateListener {
+    private static final String TAG = "AudioFrequencyMicActivity";
+
+    private static final int TEST_STARTED = 900;
+    private static final int TEST_ENDED = 901;
+    private static final int TEST_MESSAGE = 902;
+    private static final int TEST1_MESSAGE = 903;
+    private static final int TEST1_ENDED = 904;
+    private static final double MIN_ENERGY_BAND_1 = -50.0;          //dB Full Scale
+    private static final double MAX_ENERGY_BAND_1_BASE = -60.0;     //dB Full Scale
+    private static final double MIN_FRACTION_POINTS_IN_BAND = 0.3;
+    private static final double MAX_VAL = Math.pow(2, 15);
+    private static final double CLIP_LEVEL = (MAX_VAL-10) / MAX_VAL;
+
+    final OnBtnClickListener mBtnClickListener = new OnBtnClickListener();
+    Context mContext;
+
+    Button mSpeakersReady;              //user signal that external speakers are connected
+    Button mTest1Button;                //execute test 1
+    Button mUsbMicReady;                //user signal that the USB microphone is connected
+    Button mTest2Button;                //execute test 2
+    String mUsbDevicesInfo;             //usb device info for report
+    LinearLayout mLayoutTest1;
+    LinearLayout mLayoutTest2a;
+    LinearLayout mLayoutTest2b;
+
+    TextView mSpeakerReadyText;
+    TextView mTest2Result;
+    TextView mUsbStatusText;
+    TextView mTest1Result;
+    ProgressBar mProgressBar;
+
+    private boolean mIsRecording = false;
+    private final Object mRecordingLock = new Object();
+    private AudioRecord mRecorder;
+    private int mMinRecordBufferSizeInSamples = 0;
+    private short[] mAudioShortArray;
+    private short[] mAudioShortArray2;
+
+    private final int mBlockSizeSamples = 1024;
+    private final int mSamplingRate = 48000;
+    private final int mSelectedRecordSource = MediaRecorder.AudioSource.VOICE_RECOGNITION;
+    private final int mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
+    private final int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
+    private Thread mRecordThread;
+
+    PipeShort mPipe = new PipeShort(65536);
+    SoundPlayerObject mSPlayer;
+
+    private DspBufferComplex mC;
+    private DspBufferDouble mData;
+
+    private DspWindow mWindow;
+    private DspFftServer mFftServer;
+    private VectorAverage mFreqAverageMain = new VectorAverage();
+
+    private VectorAverage mFreqAverageBase = new VectorAverage();
+    private VectorAverage mFreqAverageBuiltIn = new VectorAverage();
+    private VectorAverage mFreqAverageReference = new VectorAverage();
+
+    private int mCurrentTest = -1;
+    int mBands = 4;
+    AudioBandSpecs[] bandSpecsArray = new AudioBandSpecs[mBands];
+    AudioBandSpecs[] baseBandSpecsArray = new AudioBandSpecs[mBands];
+
+    int mMaxLevel;
+    private class OnBtnClickListener implements OnClickListener {
+        @Override
+        public void onClick(View v) {
+            switch (v.getId()) {
+            case R.id.audio_frequency_mic_speakers_ready_btn:
+                testSpeakersReady();
+                break;
+            case R.id.audio_frequency_mic_test1_btn:
+                startTest1();
+                break;
+            case R.id.audio_frequency_mic_mic_ready_btn:
+                testUSB();
+                break;
+            case R.id.audio_frequency_mic_test2_btn:
+                startTest2();
+                break;
+            }
+        }
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.audio_frequency_mic_activity);
+        mContext = this;
+        mSpeakerReadyText = (TextView) findViewById(R.id.audio_frequency_mic_speakers_ready_status);
+
+        mSpeakersReady  = (Button)findViewById(R.id.audio_frequency_mic_speakers_ready_btn);
+        mSpeakersReady.setOnClickListener(mBtnClickListener);
+        mTest1Button = (Button)findViewById(R.id.audio_frequency_mic_test1_btn);
+        mTest1Button.setOnClickListener(mBtnClickListener);
+        mTest1Result = (TextView)findViewById(R.id.audio_frequency_mic_results1_text);
+        mLayoutTest1 = (LinearLayout) findViewById(R.id.audio_frequency_mic_layout_test1);
+        mLayoutTest2a = (LinearLayout) findViewById(R.id.audio_frequency_mic_layout_test2a);
+        mLayoutTest2b = (LinearLayout) findViewById(R.id.audio_frequency_mic_layout_test2b);
+        mUsbMicReady = (Button)findViewById(R.id.audio_frequency_mic_mic_ready_btn);
+        mUsbMicReady.setOnClickListener(mBtnClickListener);
+
+        mUsbStatusText = (TextView)findViewById(R.id.audio_frequency_mic_usb_status);
+        mTest2Button = (Button)findViewById(R.id.audio_frequency_mic_test2_btn);
+        mTest2Button.setOnClickListener(mBtnClickListener);
+        mTest2Result = (TextView)findViewById(R.id.audio_frequency_mic_results_text);
+        mProgressBar = (ProgressBar)findViewById(R.id.audio_frequency_mic_progress_bar);
+        showWait(false);
+        enableLayout(mLayoutTest1, false);
+        enableLayout(mLayoutTest2a, false);
+        enableLayout(mLayoutTest2b, false);
+
+        mSPlayer = new SoundPlayerObject();
+        mSPlayer.setSoundWithResId(getApplicationContext(), R.raw.stereo_mono_white_noise_48);
+        mSPlayer.setBalance(0.5f);
+
+        //Init FFT stuff
+        mAudioShortArray2 = new short[mBlockSizeSamples*2];
+        mData = new DspBufferDouble(mBlockSizeSamples);
+        mC = new DspBufferComplex(mBlockSizeSamples);
+        mFftServer = new DspFftServer(mBlockSizeSamples);
+
+        int overlap = mBlockSizeSamples / 2;
+
+        mWindow = new DspWindow(DspWindow.WINDOW_HANNING, mBlockSizeSamples, overlap);
+
+        setPassFailButtonClickListeners();
+        getPassButton().setEnabled(false);
+        setInfoResources(R.string.audio_frequency_mic_test,
+                R.string.audio_frequency_mic_info, -1);
+
+        //Init bands for BuiltIn/Reference test
+        bandSpecsArray[0] = new AudioBandSpecs(
+                50, 500,        /* frequency start,stop */
+                -20.0, -50,     /* start top,bottom value */
+                4.0, -4.0       /* stop top,bottom value */);
+
+        bandSpecsArray[1] = new AudioBandSpecs(
+                500,4000,       /* frequency start,stop */
+                4.0, -4.0,      /* start top,bottom value */
+                4.0, -4.0        /* stop top,bottom value */);
+
+        bandSpecsArray[2] = new AudioBandSpecs(
+                4000, 12000,    /* frequency start,stop */
+                4.0, -4.0,      /* start top,bottom value */
+                5.0, -5.0       /* stop top,bottom value */);
+
+        bandSpecsArray[3] = new AudioBandSpecs(
+                12000, 20000,   /* frequency start,stop */
+                5.0, -5.0,      /* start top,bottom value */
+                5.0, -30.0      /* stop top,bottom value */);
+
+        //Init base bands for silence
+        baseBandSpecsArray[0] = new AudioBandSpecs(
+                50, 500,        /* frequency start,stop */
+                40.0, -50.0,     /* start top,bottom value */
+                5.0, -50.0       /* stop top,bottom value */);
+
+        baseBandSpecsArray[1] = new AudioBandSpecs(
+                500,4000,       /* frequency start,stop */
+                5.0, -50.0,      /* start top,bottom value */
+                5.0, -50.0        /* stop top,bottom value */);
+
+        baseBandSpecsArray[2] = new AudioBandSpecs(
+                4000, 12000,    /* frequency start,stop */
+                5.0, -50.0,      /* start top,bottom value */
+                5.0, -50.0       /* stop top,bottom value */);
+
+        baseBandSpecsArray[3] = new AudioBandSpecs(
+                12000, 20000,   /* frequency start,stop */
+                5.0, -50.0,      /* start top,bottom value */
+                5.0, -50.0      /* stop top,bottom value */);
+
+    }
+
+    /**
+     * enable test ui elements
+     */
+    private void enableLayout(LinearLayout layout, boolean enable) {
+        for (int i = 0; i < layout.getChildCount(); i++) {
+            View view = layout.getChildAt(i);
+            view.setEnabled(enable);
+        }
+    }
+
+    /**
+     * show active progress bar
+     */
+    private void showWait(boolean show) {
+        if (show) {
+            mProgressBar.setVisibility(View.VISIBLE);
+        } else {
+            mProgressBar.setVisibility(View.INVISIBLE);
+        }
+    }
+
+    private void setMaxLevel() {
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        mMaxLevel = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
+        am.setStreamVolume(AudioManager.STREAM_MUSIC, (int)(mMaxLevel), 0);
+    }
+
+    private void setMinLevel() {
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        am.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0);
+    }
+
+    /**
+     *  Start test 1: background environment and built-in microphone capture
+     */
+    private void startTest1() {
+        if (mTestThread != null && !mTestThread.isAlive()) {
+            mTestThread = null; //test thread finished; discard the stale reference.
+        }
+
+        if (mTestThread == null) {
+            Log.v(TAG,"Executing test Thread");
+            mTestThread = new Thread(mTest1Runnable);
+            //getPassButton().setEnabled(false);
+            if (!mSPlayer.isAlive())
+                mSPlayer.start();
+            mTestThread.start();
+        } else {
+            Log.v(TAG,"test Thread already running.");
+        }
+    }
+
+    Thread mTestThread;
+    Runnable mTest1Runnable = new Runnable() {
+        public void run() {
+            Message msg = Message.obtain();
+            msg.what = TEST_STARTED;
+            mMessageHandler.sendMessage(msg);
+
+            setMinLevel();
+            sendMessage("Testing Background Environment");
+            mCurrentTest = 0;
+            mSPlayer.setBalance(0.5f);
+            mFreqAverageBase.reset();
+            play();
+
+            setMaxLevel();
+            sendMessage("Testing Built in Microphone");
+            mCurrentTest = 1;
+            mFreqAverageBuiltIn.reset();
+            mSPlayer.setBalance(0.5f);
+            play();
+
+            mCurrentTest = -1;
+            sendMessage("Testing Completed");
+
+            Message msg2 = Message.obtain();
+            msg2.what = TEST1_ENDED;
+            mMessageHandler.sendMessage(msg2);
+        }
+
+        private void play() {
+            startRecording();
+            mSPlayer.play(true);
+
+            try {
+                Thread.sleep(2000);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+                //restore interrupted status
+                Thread.currentThread().interrupt();
+            }
+
+            mSPlayer.play(false);
+            stopRecording();
+        }
+
+        private void sendMessage(String str) {
+            Message msg = Message.obtain();
+            msg.what = TEST1_MESSAGE;
+            msg.obj = str;
+            mMessageHandler.sendMessage(msg);
+        }
+    };
+
+    /**
+     *  Start test 2: USB reference microphone capture
+     */
+    private void startTest2() {
+        if (mTestThread != null && !mTestThread.isAlive()) {
+            mTestThread = null; //test thread finished; discard the stale reference.
+        }
+
+        if (mTestThread == null) {
+            Log.v(TAG,"Executing test2 Thread");
+            mTestThread = new Thread(mTest2Runnable);
+            //getPassButton().setEnabled(false);
+            if (!mSPlayer.isAlive())
+                mSPlayer.start();
+            mTestThread.start();
+        } else {
+            Log.v(TAG,"test Thread already running.");
+        }
+    }
+
+    Runnable mTest2Runnable = new Runnable() {
+        public void run() {
+            Message msg = Message.obtain();
+            msg.what = TEST_STARTED;
+            mMessageHandler.sendMessage(msg);
+
+            sendMessage("Testing Reference USB Microphone");
+            mCurrentTest = 2;
+            mFreqAverageReference.reset();
+            mSPlayer.setBalance(0.5f);
+            play();
+
+            mCurrentTest = -1;
+            sendMessage("Testing Completed");
+
+            Message msg2 = Message.obtain();
+            msg2.what = TEST_ENDED;
+            mMessageHandler.sendMessage(msg2);
+        }
+
+        private void play() {
+            startRecording();
+            mSPlayer.play(true);
+
+            try {
+                Thread.sleep(2000);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+                //restore interrupted status
+                Thread.currentThread().interrupt();
+            }
+
+            mSPlayer.play(false);
+            stopRecording();
+        }
+
+        private void sendMessage(String str) {
+            Message msg = Message.obtain();
+            msg.what = TEST_MESSAGE;
+            msg.obj = str;
+            mMessageHandler.sendMessage(msg);
+        }
+    };
+
+    private Handler mMessageHandler = new Handler() {
+        public void handleMessage(Message msg) {
+            super.handleMessage(msg);
+            switch (msg.what) {
+            case TEST_STARTED:
+                showWait(true);
+                getPassButton().setEnabled(false);
+                break;
+            case TEST_ENDED:
+                showWait(false);
+                computeTest2Results();
+                break;
+            case TEST1_MESSAGE: {
+                    String str = (String)msg.obj;
+                    if (str != null) {
+                        mTest1Result.setText(str);
+                    }
+                }
+                break;
+            case TEST1_ENDED:
+                showWait(false);
+                computeTest1Results();
+                break;
+            case TEST_MESSAGE: {
+                    String str = (String)msg.obj;
+                    if (str != null) {
+                        mTest2Result.setText(str);
+                    }
+                }
+                break;
+            default:
+                Log.e(TAG, String.format("Unknown message: %d", msg.what));
+            }
+        }
+    };
+
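+    // Per-measurement results: average level, total points and in-bound points per frequency
+    // band, plus helpers that check the level and in-bound fraction against the test limits.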
+    private class Results {
+        private String mLabel;
+        public double[] mValuesLog;
+        int[] mPointsPerBand = new int[mBands];
+        double[] mAverageEnergyPerBand = new double[mBands];
+        int[] mInBoundPointsPerBand = new int[mBands];
+        public boolean mIsBaseMeasurement = false;
+        public Results(String label) {
+            mLabel = label;
+        }
+
+        //append results
+        public String toString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append(String.format("Channel %s\n", mLabel));
+            sb.append("Level in Band 1 : " + (testLevel() ? "OK" :"FAILED") +
+                    (mIsBaseMeasurement ? " (Base Meas.)" : "") + "\n");
+            for (int b = 0; b < mBands; b++) {
+                double percent = 0;
+                if (mPointsPerBand[b] > 0) {
+                    percent = 100.0 * (double) mInBoundPointsPerBand[b] / mPointsPerBand[b];
+                }
+                sb.append(String.format(
+                        " Band %d: Av. Level: %.1f dB InBand: %d/%d (%.1f%%) %s\n",
+                        b, mAverageEnergyPerBand[b],
+                        mInBoundPointsPerBand[b],
+                        mPointsPerBand[b],
+                        percent,
+                        (testInBand(b) ? "OK" : "FAILED")));
+            }
+            return sb.toString();
+        }
+
+        public boolean testLevel() {
+            if (mIsBaseMeasurement && mAverageEnergyPerBand[1] <= MAX_ENERGY_BAND_1_BASE) {
+                return true;
+            } else if (mAverageEnergyPerBand[1] >= MIN_ENERGY_BAND_1) {
+                return true;
+            }
+            return false;
+        }
+
+        public boolean testInBand(int b) {
+            if (b >= 0 && b < mBands && mPointsPerBand[b] > 0) {
+                if ((double) mInBoundPointsPerBand[b] / mPointsPerBand[b] >
+                    MIN_FRACTION_POINTS_IN_BAND) {
+                        return true;
+                }
+            }
+            return false;
+        }
+
+        public boolean testAll() {
+            if (!testLevel()) {
+                return false;
+            }
+            for (int b = 0; b < mBands; b++) {
+                if (!testInBand(b)) {
+                    return false;
+                }
+            }
+            return true;
+        }
+    }
+
+
+    /**
+     * compute test1 results
+     */
+    private void computeTest1Results() {
+
+        Results resultsBase = new Results("Base");
+        if (computeResultsForVector(mFreqAverageBase, resultsBase, true, baseBandSpecsArray)) {
+            appendResultsToScreen(resultsBase.toString(), mTest1Result);
+            recordTestResults(resultsBase);
+        }
+
+        Results resultsBuiltIn = new Results("BuiltIn");
+        if (computeResultsForVector(mFreqAverageBuiltIn, resultsBuiltIn, false, bandSpecsArray)) {
+            appendResultsToScreen(resultsBuiltIn.toString(), mTest1Result);
+            recordTestResults(resultsBuiltIn);
+        }
+
+        //tell user to connect USB Microphone
+        appendResultsToScreen("\n\n" +
+                getResources().getText(R.string.audio_frequency_mic_connect_mic), mTest1Result);
+        enableLayout(mLayoutTest2a, true);
+    }
+
+    /**
+     * compute test 2 results
+     */
+    private void computeTest2Results() {
+        Results resultsReference = new Results("Reference");
+        if (computeResultsForVector(mFreqAverageReference, resultsReference,
+                false, bandSpecsArray)) {
+            appendResultsToScreen(resultsReference.toString(),mTest2Result);
+            recordTestResults(resultsReference);
+            getPassButton().setEnabled(true);
+        }
+    }
+
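+    // Converts the averaged magnitude spectrum to dB, accumulates the average level and point
+    // count per band, offsets the band limits relative to the band 1 level, and then counts how
+    // many points fall inside each band's limits. Returns false if no data was captured.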
+    private boolean computeResultsForVector(VectorAverage freqAverage, Results results,
+            boolean isBase, AudioBandSpecs[] bandSpecs) {
+
+        results.mIsBaseMeasurement = isBase;
+        int points = freqAverage.getSize();
+        if (points > 0) {
+            //compute vector in db
+            double[] values = new double[points];
+            freqAverage.getData(values, false);
+            results.mValuesLog = new double[points];
+            for (int i = 0; i < points; i++) {
+                results.mValuesLog[i] = 20 * Math.log10(values[i]);
+            }
+
+            int currentBand = 0;
+            for (int i = 0; i < points; i++) {
+                double freq = (double)mSamplingRate * i / (double)mBlockSizeSamples;
+                if (freq > bandSpecs[currentBand].mFreqStop) {
+                    currentBand++;
+                    if (currentBand >= mBands)
+                        break;
+                }
+
+                if (freq >= bandSpecs[currentBand].mFreqStart) {
+                    results.mAverageEnergyPerBand[currentBand] += results.mValuesLog[i];
+                    results.mPointsPerBand[currentBand]++;
+                }
+            }
+
+            for (int b = 0; b < mBands; b++) {
+                if (results.mPointsPerBand[b] > 0) {
+                    results.mAverageEnergyPerBand[b] =
+                            results.mAverageEnergyPerBand[b] / results.mPointsPerBand[b];
+                }
+            }
+
+            //set offset relative to band 1 level
+            for (int b = 0; b < mBands; b++) {
+                bandSpecs[b].setOffset(results.mAverageEnergyPerBand[1]);
+            }
+
+            //test points in band.
+            currentBand = 0;
+            for (int i = 0; i < points; i++) {
+                double freq = (double)mSamplingRate * i / (double)mBlockSizeSamples;
+                if (freq >  bandSpecs[currentBand].mFreqStop) {
+                    currentBand++;
+                    if (currentBand >= mBands)
+                        break;
+                }
+
+                if (freq >= bandSpecs[currentBand].mFreqStart) {
+                    double value = results.mValuesLog[i];
+                    if (bandSpecs[currentBand].isInBounds(freq, value)) {
+                        results.mInBoundPointsPerBand[currentBand]++;
+                    }
+                }
+            }
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    //append results
+    private void appendResultsToScreen(String str, TextView text) {
+        String currentText = text.getText().toString();
+        text.setText(currentText + "\n" + str);
+    }
+
+    /**
+     * Store test results in log
+     */
+    private void recordTestResults(Results results) {
+        String channelLabel = "channel_" + results.mLabel;
+
+        for (int b = 0; b < mBands; b++) {
+            String bandLabel = String.format(channelLabel + "_%d", b);
+            getReportLog().addValue(
+                    bandLabel + "_Level",
+                    results.mAverageEnergyPerBand[b],
+                    ResultType.HIGHER_BETTER,
+                    ResultUnit.NONE);
+
+            getReportLog().addValue(
+                    bandLabel + "_pointsinbound",
+                    results.mInBoundPointsPerBand[b],
+                    ResultType.HIGHER_BETTER,
+                    ResultUnit.COUNT);
+
+            getReportLog().addValue(
+                    bandLabel + "_pointstotal",
+                    results.mPointsPerBand[b],
+                    ResultType.NEUTRAL,
+                    ResultUnit.COUNT);
+        }
+
+        getReportLog().addValues(channelLabel + "_magnitudeSpectrumLog",
+                results.mValuesLog,
+                ResultType.NEUTRAL,
+                ResultUnit.NONE);
+
+        Log.v(TAG, "Results Recorded");
+    }
+
+    private void startRecording() {
+        synchronized (mRecordingLock) {
+            mIsRecording = true;
+        }
+
+        boolean successful = initRecord();
+        if (successful) {
+            startRecordingForReal();
+        } else {
+            Log.e(TAG, "Recorder initialization error.");
+            synchronized (mRecordingLock) {
+                mIsRecording = false;
+            }
+        }
+    }
+
+    private void startRecordingForReal() {
+        // start streaming
+        if (mRecordThread == null) {
+            mRecordThread = new Thread(AudioFrequencyMicActivity.this);
+            mRecordThread.setName("FrequencyAnalyzerThread");
+        }
+        if (!mRecordThread.isAlive()) {
+            mRecordThread.start();
+        }
+
+        mPipe.flush();
+
+        long startTime = SystemClock.uptimeMillis();
+        mRecorder.startRecording();
+        if (mRecorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+            stopRecording();
+            return;
+        }
+        Log.v(TAG, "Start time: " + (long) (SystemClock.uptimeMillis() - startTime) + " ms");
+    }
+
+    private void stopRecording() {
+        synchronized (mRecordingLock) {
+            stopRecordingForReal();
+            mIsRecording = false;
+        }
+    }
+
+    private void stopRecordingForReal() {
+
+        // stop streaming
+        Thread zeThread = mRecordThread;
+        mRecordThread = null;
+        if (zeThread != null) {
+            zeThread.interrupt();
+            try {
+                zeThread.join();
+            } catch (InterruptedException e) {
+                //restore this thread's interrupted status
+                Thread.currentThread().interrupt();
+            }
+        }
+        // release recording resources
+        if (mRecorder != null) {
+            mRecorder.stop();
+            mRecorder.release();
+            mRecorder = null;
+        }
+    }
+
+    private boolean initRecord() {
+        int minRecordBuffSizeInBytes = AudioRecord.getMinBufferSize(mSamplingRate,
+                mChannelConfig, mAudioFormat);
+        Log.v(TAG,"FrequencyAnalyzer: min buff size = " + minRecordBuffSizeInBytes + " bytes");
+        if (minRecordBuffSizeInBytes <= 0) {
+            return false;
+        }
+
+        mMinRecordBufferSizeInSamples = minRecordBuffSizeInBytes / 2;
+        // allocate the short array used to read the audio data
+
+        mAudioShortArray = new short[mMinRecordBufferSizeInSamples];
+
+        Log.v(TAG, "Initiating record:");
+        Log.v(TAG, "      using source " + mSelectedRecordSource);
+        Log.v(TAG, "      at " + mSamplingRate + "Hz");
+
+        try {
+            mRecorder = new AudioRecord(mSelectedRecordSource, mSamplingRate,
+                    mChannelConfig, mAudioFormat, 2 * minRecordBuffSizeInBytes);
+        } catch (IllegalArgumentException e) {
+            return false;
+        }
+        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
+            mRecorder.release();
+            mRecorder = null;
+            return false;
+        }
+        mRecorder.setRecordPositionUpdateListener(this);
+        mRecorder.setPositionNotificationPeriod(mBlockSizeSamples / 2);
+        return true;
+    }
+
+    // ---------------------------------------------------------
+    // Implementation of AudioRecord.OnPeriodicNotificationListener
+    // --------------------
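+    // Called every mBlockSizeSamples/2 recorded samples: once a full block is available in the
+    // pipe it is windowed, run through the FFT and its magnitude spectrum accumulated into the
+    // base, built-in or reference average for the test currently in progress.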
+    public void onPeriodicNotification(AudioRecord recorder) {
+        int samplesAvailable = mPipe.availableToRead();
+        int samplesNeeded = mBlockSizeSamples;
+        if (samplesAvailable >= samplesNeeded) {
+            mPipe.read(mAudioShortArray2, 0, samplesNeeded);
+
+            //compute peak level, clipping count and energy for this block.
+            int clipcount = 0;
+            double sum = 0;
+            double maxabs = 0;
+            int i;
+
+            for (i = 0; i < samplesNeeded; i++) {
+                double value = mAudioShortArray2[i] / MAX_VAL;
+                double valueabs = Math.abs(value);
+
+                if (valueabs > maxabs) {
+                    maxabs = valueabs;
+                }
+
+                if (valueabs > CLIP_LEVEL) {
+                    clipcount++;
+                }
+
+                sum += value * value;
+                //fft stuff
+                mData.mData[i] = value;
+            }
+
+            //for the current frame, compute FFT and send to the viewer.
+
+            //apply window and pack as complex for now.
+            DspBufferMath.mult(mData, mData, mWindow.mBuffer);
+            DspBufferMath.set(mC, mData);
+            mFftServer.fft(mC, 1);
+
+            double[] halfMagnitude = new double[mBlockSizeSamples / 2];
+            for (i = 0; i < mBlockSizeSamples / 2; i++) {
+                halfMagnitude[i] = Math.sqrt(mC.mReal[i] * mC.mReal[i] + mC.mImag[i] * mC.mImag[i]);
+            }
+
+            mFreqAverageMain.setData(halfMagnitude, false); //average all of them!
+
+            switch(mCurrentTest) {
+                case 0:
+                    mFreqAverageBase.setData(halfMagnitude, false);
+                    break;
+                case 1:
+                    mFreqAverageBuiltIn.setData(halfMagnitude, false);
+                    break;
+                case 2:
+                    mFreqAverageReference.setData(halfMagnitude, false);
+                    break;
+            }
+        }
+    }
+
+    public void onMarkerReached(AudioRecord track) {
+    }
+
+    // ---------------------------------------------------------
+    // Implementation of Runnable for the audio recording thread
+    // --------------------
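+    // Recording thread body: keeps reading from the AudioRecord into the pipe until the
+    // thread is interrupted by stopRecordingForReal().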
+    public void run() {
+        Thread thisThread = Thread.currentThread();
+        while (!thisThread.isInterrupted()) {
+            // read from native recorder
+            int nSamplesRead = mRecorder.read(mAudioShortArray, 0, mMinRecordBufferSizeInSamples);
+            if (nSamplesRead > 0) {
+                mPipe.write(mAudioShortArray, 0, nSamplesRead);
+            }
+        }
+    }
+
+    private void testSpeakersReady() {
+        boolean isUsbConnected =
+                UsbMicrophoneTester.getIsMicrophoneConnected(getApplicationContext());
+        if (isUsbConnected) {
+            mSpeakerReadyText.setText(" USB device detected, please remove it");
+            enableLayout(mLayoutTest1, false);
+            //test 1 cannot run until the USB device is removed
+        } else {
+            mSpeakerReadyText.setText(" No USB device detected. OK");
+            enableLayout(mLayoutTest1, true);
+        }
+    }
+
+    private void testUSB() {
+        boolean isConnected = UsbMicrophoneTester.getIsMicrophoneConnected(getApplicationContext());
+        mUsbDevicesInfo = UsbMicrophoneTester.getUSBDeviceListString(getApplicationContext());
+
+        if (isConnected) {
+            mUsbStatusText.setText(
+                    getResources().getText(R.string.audio_frequency_mic_mic_ready_text));
+            enableLayout(mLayoutTest2b, true);
+        } else {
+            mUsbStatusText.setText(
+                    getResources().getText(R.string.audio_frequency_mic_mic_not_ready_text));
+            enableLayout(mLayoutTest2b, false);
+        }
+    }
+
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioFrequencySpeakerActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioFrequencySpeakerActivity.java
new file mode 100644
index 0000000..f9334b3
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioFrequencySpeakerActivity.java
@@ -0,0 +1,732 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+import com.android.cts.verifier.audio.wavelib.*;
+import com.android.compatibility.common.util.ReportLog;
+import com.android.compatibility.common.util.ResultType;
+import com.android.compatibility.common.util.ResultUnit;
+import android.content.Context;
+import android.content.BroadcastReceiver;
+import android.content.Intent;
+import android.content.IntentFilter;
+
+import android.media.AudioDeviceCallback;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.media.AudioRecord;
+import android.media.MediaRecorder;
+
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Message;
+import android.os.SystemClock;
+
+import android.util.Log;
+
+import android.view.View;
+import android.view.View.OnClickListener;
+
+import android.widget.Button;
+import android.widget.TextView;
+import android.widget.SeekBar;
+import android.widget.LinearLayout;
+import android.widget.ProgressBar;
+
+/**
+ * Tests the device speakers' frequency response using an external USB reference microphone.
+ */
+public class AudioFrequencySpeakerActivity extends PassFailButtons.Activity implements Runnable,
+    AudioRecord.OnRecordPositionUpdateListener {
+    private static final String TAG = "AudioFrequencySpeakerActivity";
+
+    static final int TEST_STARTED = 900;
+    static final int TEST_ENDED = 901;
+    static final int TEST_MESSAGE = 902;
+    static final double MIN_ENERGY_BAND_1 = -50.0;          //dB Full Scale
+    static final double MAX_ENERGY_BAND_1_BASE = -60.0;     //dB Full Scale
+    static final double MIN_FRACTION_POINTS_IN_BAND = 0.3;
+
+    final OnBtnClickListener mBtnClickListener = new OnBtnClickListener();
+    Context mContext;
+
+    Button mLoopbackPlugReady;          //user signal that the USB microphone is connected
+    Button mTestButton;                 //execute the speaker test
+    String mUsbDevicesInfo;             //usb device info for report
+    LinearLayout mLinearLayout;
+    TextView mResultText;
+    TextView mUsbStatusText;
+    ProgressBar mProgressBar;
+
+    private boolean mIsRecording = false;
+    private final Object mRecordingLock = new Object();
+    private AudioRecord mRecorder;
+    private int mMinRecordBufferSizeInSamples = 0;
+    private short[] mAudioShortArray;
+    private short[] mAudioShortArray2;
+
+    private final int mBlockSizeSamples = 1024;
+    private final int mSamplingRate = 48000;
+    private final int mSelectedRecordSource = MediaRecorder.AudioSource.VOICE_RECOGNITION;
+    private final int mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
+    private final int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
+    private Thread mRecordThread;
+    private boolean mRecordThreadShutdown = false;
+
+    PipeShort mPipe = new PipeShort(65536);
+    SoundPlayerObject mSPlayer;
+
+    private DspBufferComplex mC;
+    private DspBufferDouble mData;
+
+    private DspWindow mWindow;
+    private DspFftServer mFftServer;
+    private VectorAverage mFreqAverageMain = new VectorAverage();
+
+    private VectorAverage mFreqAverageBase = new VectorAverage();
+    private VectorAverage mFreqAverageLeft = new VectorAverage();
+    private VectorAverage mFreqAverageRight = new VectorAverage();
+
+    private int mCurrentTest = -1;
+    int mBands = 4;
+    AudioBandSpecs[] bandSpecsArray = new AudioBandSpecs[mBands];
+    AudioBandSpecs[] baseBandSpecsArray = new AudioBandSpecs[mBands];
+
+    int mMaxLevel;
+    private class OnBtnClickListener implements OnClickListener {
+        @Override
+        public void onClick(View v) {
+            switch (v.getId()) {
+            case R.id.audio_frequency_speaker_mic_ready_btn:
+                testUSB();
+                break;
+            case R.id.audio_frequency_speaker_test_btn:
+                startAudioTest();
+                break;
+            }
+        }
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.audio_frequency_speaker_activity);
+
+        mContext = this;
+
+        mLoopbackPlugReady = (Button)findViewById(R.id.audio_frequency_speaker_mic_ready_btn);
+        mLoopbackPlugReady.setOnClickListener(mBtnClickListener);
+        mLinearLayout = (LinearLayout)findViewById(R.id.audio_frequency_speaker_layout);
+        mUsbStatusText = (TextView)findViewById(R.id.audio_frequency_speaker_usb_status);
+        mTestButton = (Button)findViewById(R.id.audio_frequency_speaker_test_btn);
+        mTestButton.setOnClickListener(mBtnClickListener);
+        mResultText = (TextView)findViewById(R.id.audio_frequency_speaker_results_text);
+        mProgressBar = (ProgressBar)findViewById(R.id.audio_frequency_speaker_progress_bar);
+        showWait(false);
+        enableLayout(false);         //disabled all content
+
+        mSPlayer = new SoundPlayerObject();
+        mSPlayer.setSoundWithResId(getApplicationContext(), R.raw.stereo_mono_white_noise_48);
+        mSPlayer.setBalance(0.5f);
+
+        //Init FFT stuff
+        mAudioShortArray2 = new short[mBlockSizeSamples*2];
+        mData = new DspBufferDouble(mBlockSizeSamples);
+        mC = new DspBufferComplex(mBlockSizeSamples);
+        mFftServer = new DspFftServer(mBlockSizeSamples);
+
+        int overlap = mBlockSizeSamples / 2;
+
+        mWindow = new DspWindow(DspWindow.WINDOW_HANNING, mBlockSizeSamples, overlap);
+
+        setPassFailButtonClickListeners();
+        getPassButton().setEnabled(false);
+        setInfoResources(R.string.audio_frequency_speaker_test,
+                R.string.audio_frequency_speaker_info, -1);
+
+        //Init bands for Left/Right test
+        bandSpecsArray[0] = new AudioBandSpecs(
+                50, 500,        /* frequency start,stop */
+                -20.0, -50,     /* start top,bottom value */
+                4.0, -4.0       /* stop top,bottom value */);
+
+        bandSpecsArray[1] = new AudioBandSpecs(
+                500,4000,       /* frequency start,stop */
+                4.0, -4.0,      /* start top,bottom value */
+                4.0, -4.0        /* stop top,bottom value */);
+
+        bandSpecsArray[2] = new AudioBandSpecs(
+                4000, 12000,    /* frequency start,stop */
+                4.0, -4.0,      /* start top,bottom value */
+                5.0, -5.0       /* stop top,bottom value */);
+
+        bandSpecsArray[3] = new AudioBandSpecs(
+                12000, 20000,   /* frequency start,stop */
+                5.0, -5.0,      /* start top,bottom value */
+                5.0, -30.0      /* stop top,bottom value */);
+
+        //Init base bands for silence
+        baseBandSpecsArray[0] = new AudioBandSpecs(
+                50, 500,        /* frequency start,stop */
+                40.0, -50.0,     /* start top,bottom value */
+                5.0, -50.0       /* stop top,bottom value */);
+
+        baseBandSpecsArray[1] = new AudioBandSpecs(
+                500,4000,       /* frequency start,stop */
+                5.0, -50.0,      /* start top,bottom value */
+                5.0, -50.0        /* stop top,bottom value */);
+
+        baseBandSpecsArray[2] = new AudioBandSpecs(
+                4000, 12000,    /* frequency start,stop */
+                5.0, -50.0,      /* start top,bottom value */
+                5.0, -50.0       /* stop top,bottom value */);
+
+        baseBandSpecsArray[3] = new AudioBandSpecs(
+                12000, 20000,   /* frequency start,stop */
+                5.0, -50.0,      /* start top,bottom value */
+                5.0, -50.0      /* stop top,bottom value */);
+
+    }
+
+    /**
+     * enable test ui elements
+     */
+    private void enableLayout(boolean enable) {
+        for (int i = 0; i < mLinearLayout.getChildCount(); i++) {
+            View view = mLinearLayout.getChildAt(i);
+            view.setEnabled(enable);
+        }
+    }
+
+    /**
+     * show active progress bar
+     */
+    private void showWait(boolean show) {
+        if (show) {
+            mProgressBar.setVisibility(View.VISIBLE);
+        } else {
+            mProgressBar.setVisibility(View.INVISIBLE);
+        }
+    }
+
+    private void setMaxLevel() {
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        mMaxLevel = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
+        am.setStreamVolume(AudioManager.STREAM_MUSIC, (int)(mMaxLevel), 0);
+    }
+
+    private void setMinLevel() {
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        am.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0);
+    }
+
+    /**
+     *  Start the speaker frequency response test
+     */
+    private void startAudioTest() {
+        if (mTestThread != null && !mTestThread.isAlive()) {
+            mTestThread = null; //test thread finished; discard the stale reference.
+        }
+
+        if (mTestThread == null) {
+            Log.v(TAG,"Executing test Thread");
+            mTestThread = new Thread(mPlayRunnable);
+            getPassButton().setEnabled(false);
+            if (!mSPlayer.isAlive())
+                mSPlayer.start();
+            mTestThread.start();
+        } else {
+            Log.v(TAG,"test Thread already running.");
+        }
+    }
+
+    Thread mTestThread;
+    Runnable mPlayRunnable = new Runnable() {
+        public void run() {
+            Message msg = Message.obtain();
+            msg.what = TEST_STARTED;
+            mMessageHandler.sendMessage(msg);
+
+            setMinLevel();
+            sendMessage("Testing Background Environment");
+            mCurrentTest = 0;
+            mSPlayer.setBalance(0.5f);
+            mFreqAverageBase.reset();
+            play();
+
+            setMaxLevel();
+            sendMessage("Testing Left Capture");
+            mCurrentTest = 1;
+            mFreqAverageLeft.reset();
+            mSPlayer.setBalance(0.0f);
+            play();
+
+            sendMessage("Testing Right Capture");
+            mCurrentTest = 2;
+            mFreqAverageRight.reset();
+            mSPlayer.setBalance(1.0f);
+            play();
+
+            mCurrentTest = -1;
+            sendMessage("Testing Completed");
+
+            Message msg2 = Message.obtain();
+            msg2.what = TEST_ENDED;
+            mMessageHandler.sendMessage(msg2);
+        }
+
+        private void play() {
+            startRecording();
+            mSPlayer.play(true);
+
+            try {
+                Thread.sleep(2000);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+                //restore interrupted status
+                Thread.currentThread().interrupt();
+            }
+
+            mSPlayer.play(false);
+            stopRecording();
+        }
+
+        private void sendMessage(String str) {
+            Message msg = Message.obtain();
+            msg.what = TEST_MESSAGE;
+            msg.obj = str;
+            mMessageHandler.sendMessage(msg);
+        }
+    };
+
+    private Handler mMessageHandler = new Handler() {
+        public void handleMessage(Message msg) {
+            super.handleMessage(msg);
+            switch (msg.what) {
+            case TEST_STARTED:
+                showWait(true);
+                getPassButton().setEnabled(false);
+                break;
+            case TEST_ENDED:
+                showWait(false);
+                computeResults();
+                break;
+            case TEST_MESSAGE:
+                String str = (String)msg.obj;
+                if (str != null) {
+                    mResultText.setText(str);
+                }
+                break;
+            default:
+                Log.e(TAG, String.format("Unknown message: %d", msg.what));
+            }
+        }
+    };
+
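+    // Per-measurement results: average level, total points and in-bound points per frequency
+    // band, plus helpers that check the level and in-bound fraction against the test limits.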
+    private class Results {
+        private String mLabel;
+        public double[] mValuesLog;
+        int[] mPointsPerBand = new int[mBands];
+        double[] mAverageEnergyPerBand = new double[mBands];
+        int[] mInBoundPointsPerBand = new int[mBands];
+        public boolean mIsBaseMeasurement = false;
+        public Results(String label) {
+            mLabel = label;
+        }
+
+        //append results
+        public String toString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append(String.format("Channel %s\n", mLabel));
+            sb.append("Level in Band 1 : " + (testLevel() ? "OK" :"FAILED") +
+                    (mIsBaseMeasurement ? " (Base Meas.)" : "") + "\n");
+            for (int b = 0; b < mBands; b++) {
+                double percent = 0;
+                if (mPointsPerBand[b] > 0) {
+                    percent = 100.0 * (double)mInBoundPointsPerBand[b] / mPointsPerBand[b];
+                }
+                sb.append(String.format(
+                        " Band %d: Av. Level: %.1f dB InBand: %d/%d (%.1f%%) %s\n",
+                        b, mAverageEnergyPerBand[b],
+                        mInBoundPointsPerBand[b],
+                        mPointsPerBand[b],
+                        percent,
+                        (testInBand(b) ? "OK" : "FAILED")));
+            }
+            return sb.toString();
+        }
+
+        public boolean testLevel() {
+            if (mIsBaseMeasurement && mAverageEnergyPerBand[1] <= MAX_ENERGY_BAND_1_BASE) {
+                return true;
+            } else if (mAverageEnergyPerBand[1] >= MIN_ENERGY_BAND_1) {
+                return true;
+            }
+            return false;
+        }
+
+        public boolean testInBand(int b) {
+            if (b >= 0 && b < mBands && mPointsPerBand[b] > 0) {
+                if ((double) mInBoundPointsPerBand[b] / mPointsPerBand[b] >
+                        MIN_FRACTION_POINTS_IN_BAND) {
+                    return true;
+                }
+            }
+            return false;
+        }
+
+        public boolean testAll() {
+            if (!testLevel()) {
+                return false;
+            }
+            for (int b = 0; b < mBands; b++) {
+                if (!testInBand(b)) {
+                    return false;
+                }
+            }
+            return true;
+        }
+    }
+
+    /**
+     * compute test results
+     */
+    private void computeResults() {
+
+        Results resultsBase = new Results("Base");
+        computeResultsForVector(mFreqAverageBase, resultsBase, true, baseBandSpecsArray);
+        Results resultsLeft = new Results("Left");
+        computeResultsForVector(mFreqAverageLeft, resultsLeft, false, bandSpecsArray);
+        Results resultsRight = new Results("Right");
+        computeResultsForVector(mFreqAverageRight, resultsRight, false, bandSpecsArray);
+        if (resultsLeft.testAll() && resultsRight.testAll() && resultsBase.testAll()) {
+            //enable button
+            getPassButton().setEnabled(true);
+        }
+    }
+
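+    // Converts the averaged magnitude spectrum to dB, accumulates the average level and point
+    // count per band, offsets the band limits relative to the band 1 level, counts the in-bound
+    // points, then prints and records the results for this channel (or a failure note if no data).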
+    private void computeResultsForVector(VectorAverage freqAverage,Results results, boolean isBase,
+            AudioBandSpecs[] bandSpecs) {
+
+        results.mIsBaseMeasurement = isBase;
+        int points = freqAverage.getSize();
+        if (points > 0) {
+            //compute vector in db
+            double[] values = new double[points];
+            freqAverage.getData(values, false);
+            results.mValuesLog = new double[points];
+            for (int i = 0; i < points; i++) {
+                results.mValuesLog[i] = 20 * Math.log10(values[i]);
+            }
+
+            int currentBand = 0;
+            for (int i = 0; i < points; i++) {
+                double freq = (double)mSamplingRate * i / (double)mBlockSizeSamples;
+                if (freq > bandSpecs[currentBand].mFreqStop) {
+                    currentBand++;
+                    if (currentBand >= mBands)
+                        break;
+                }
+
+                if (freq >= bandSpecs[currentBand].mFreqStart) {
+                    results.mAverageEnergyPerBand[currentBand] += results.mValuesLog[i];
+                    results.mPointsPerBand[currentBand]++;
+                }
+            }
+
+            for (int b = 0; b < mBands; b++) {
+                if (results.mPointsPerBand[b] > 0) {
+                    results.mAverageEnergyPerBand[b] =
+                            results.mAverageEnergyPerBand[b] / results.mPointsPerBand[b];
+                }
+            }
+
+            //set offset relative to band 1 level
+            for (int b = 0; b < mBands; b++) {
+                bandSpecs[b].setOffset(results.mAverageEnergyPerBand[1]);
+            }
+
+            //test points in band.
+            currentBand = 0;
+            for (int i = 0; i < points; i++) {
+                double freq = (double)mSamplingRate * i / (double)mBlockSizeSamples;
+                if (freq >  bandSpecs[currentBand].mFreqStop) {
+                    currentBand++;
+                    if (currentBand >= mBands)
+                        break;
+                }
+
+                if (freq >= bandSpecs[currentBand].mFreqStart) {
+                    double value = results.mValuesLog[i];
+                    if (bandSpecs[currentBand].isInBounds(freq, value)) {
+                        results.mInBoundPointsPerBand[currentBand]++;
+                    }
+                }
+            }
+
+            appendResultsToScreen(results.toString());
+            //store results
+            recordTestResults(results);
+        } else {
+            appendResultsToScreen("Failed testing channel " + results.mLabel);
+        }
+    }
+
+    //append results
+    private void appendResultsToScreen(String str) {
+        String currentText = mResultText.getText().toString();
+        mResultText.setText(currentText + "\n" + str);
+    }
+
+    /**
+     * Store test results in log
+     */
+    private void recordTestResults(Results results) {
+        String channelLabel = "channel_" + results.mLabel;
+
+        for (int b = 0; b < mBands; b++) {
+            String bandLabel = String.format(channelLabel + "_%d", b);
+            getReportLog().addValue(
+                    bandLabel + "_Level",
+                    results.mAverageEnergyPerBand[b],
+                    ResultType.HIGHER_BETTER,
+                    ResultUnit.NONE);
+
+            getReportLog().addValue(
+                    bandLabel + "_pointsinbound",
+                    results.mInBoundPointsPerBand[b],
+                    ResultType.HIGHER_BETTER,
+                    ResultUnit.COUNT);
+
+            getReportLog().addValue(
+                    bandLabel + "_pointstotal",
+                    results.mPointsPerBand[b],
+                    ResultType.NEUTRAL,
+                    ResultUnit.COUNT);
+        }
+
+        getReportLog().addValues(channelLabel + "_magnitudeSpectrumLog",
+                results.mValuesLog,
+                ResultType.NEUTRAL,
+                ResultUnit.NONE);
+
+        Log.v(TAG, "Results Recorded");
+    }
+
+    private void startRecording() {
+        synchronized (mRecordingLock) {
+            mIsRecording = true;
+        }
+
+        boolean successful = initRecord();
+        if (successful) {
+            startRecordingForReal();
+        } else {
+            Log.e(TAG, "Recorder initialization error.");
+            synchronized (mRecordingLock) {
+                mIsRecording = false;
+            }
+        }
+    }
+
+    private void startRecordingForReal() {
+        // start streaming
+        if (mRecordThread == null) {
+            mRecordThread = new Thread(AudioFrequencySpeakerActivity.this);
+            mRecordThread.setName("FrequencyAnalyzerThread");
+            mRecordThreadShutdown = false;
+        }
+        if (!mRecordThread.isAlive()) {
+            mRecordThread.start();
+        }
+
+        mPipe.flush();
+
+        long startTime = SystemClock.uptimeMillis();
+        mRecorder.startRecording();
+        if (mRecorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+            stopRecording();
+            return;
+        }
+        Log.v(TAG, "Start time: " + (long) (SystemClock.uptimeMillis() - startTime) + " ms");
+    }
+
+    private void stopRecording() {
+        synchronized (mRecordingLock) {
+            stopRecordingForReal();
+            mIsRecording = false;
+        }
+    }
+
+    private void stopRecordingForReal() {
+
+        // stop streaming
+        Thread zeThread = mRecordThread;
+        mRecordThread = null;
+        mRecordThreadShutdown = true;
+        if (zeThread != null) {
+            zeThread.interrupt();
+            try {
+                zeThread.join();
+            } catch (InterruptedException e) {
+                Log.v(TAG, "Error shutting down recording thread " + e);
+                //we don't really care about this error, just logging it.
+            }
+        }
+        // release recording resources
+        if (mRecorder != null) {
+            mRecorder.stop();
+            mRecorder.release();
+            mRecorder = null;
+        }
+    }
+
+    private boolean initRecord() {
+        int minRecordBuffSizeInBytes = AudioRecord.getMinBufferSize(mSamplingRate,
+                mChannelConfig, mAudioFormat);
+        Log.v(TAG,"FrequencyAnalyzer: min buff size = " + minRecordBuffSizeInBytes + " bytes");
+        if (minRecordBuffSizeInBytes <= 0) {
+            return false;
+        }
+
+        mMinRecordBufferSizeInSamples = minRecordBuffSizeInBytes / 2;
+        // allocate the short array used to read the audio data
+
+        mAudioShortArray = new short[mMinRecordBufferSizeInSamples];
+
+        Log.v(TAG, "Initiating record:");
+        Log.v(TAG, "      using source " + mSelectedRecordSource);
+        Log.v(TAG, "      at " + mSamplingRate + "Hz");
+
+        try {
+            mRecorder = new AudioRecord(mSelectedRecordSource, mSamplingRate,
+                    mChannelConfig, mAudioFormat, 2 * minRecordBuffSizeInBytes);
+        } catch (IllegalArgumentException e) {
+            return false;
+        }
+        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
+            mRecorder.release();
+            mRecorder = null;
+            return false;
+        }
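+        // Request periodic callbacks every half analysis block; onPeriodicNotification
+        // drains the pipe once a full FFT block of samples is available.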
+        mRecorder.setRecordPositionUpdateListener(this);
+        mRecorder.setPositionNotificationPeriod(mBlockSizeSamples / 2);
+        return true;
+    }
+
+    // ---------------------------------------------------------
+    // Implementation of AudioRecord.OnRecordPositionUpdateListener
+    // --------------------
+    public void onPeriodicNotification(AudioRecord recorder) {
+        int samplesAvailable = mPipe.availableToRead();
+        int samplesNeeded = mBlockSizeSamples;
+        if (samplesAvailable >= samplesNeeded) {
+            mPipe.read(mAudioShortArray2, 0, samplesNeeded);
+
+            //compute per-block statistics: peak level, clipping count, RMS, and FFT input.
+            double maxval = Math.pow(2, 15);
+            int clipcount = 0;
+            double cliplevel = (maxval-10) / maxval;
+            double sum = 0;
+            double maxabs = 0;
+            int i;
+            int index = 0;
+
+            for (i = 0; i < samplesNeeded; i++) {
+                double value = mAudioShortArray2[i] / maxval;
+                double valueabs = Math.abs(value);
+
+                if (valueabs > maxabs) {
+                    maxabs = valueabs;
+                }
+
+                if (valueabs > cliplevel) {
+                    clipcount++;
+                }
+
+                sum += value * value;
+                //copy the normalized sample into the FFT input buffer
+                if (index < mBlockSizeSamples) {
+                    mData.mData[index] = value;
+                }
+                index++;
+            }
+
+            //for the current frame, compute FFT and send to the viewer.
+
+            //apply window and pack as complex for now.
+            DspBufferMath.mult(mData, mData, mWindow.mBuffer);
+            DspBufferMath.set(mC, mData);
+            mFftServer.fft(mC, 1);
+
+            double[] halfMagnitude = new double[mBlockSizeSamples / 2];
+            for (i = 0; i < mBlockSizeSamples / 2; i++) {
+                halfMagnitude[i] = Math.sqrt(mC.mReal[i] * mC.mReal[i] + mC.mImag[i] * mC.mImag[i]);
+            }
+
+            mFreqAverageMain.setData(halfMagnitude, false); //average all of them!
+
+            switch(mCurrentTest) {
+                case 0:
+                    mFreqAverageBase.setData(halfMagnitude, false);
+                    break;
+                case 1:
+                    mFreqAverageLeft.setData(halfMagnitude, false);
+                    break;
+                case 2:
+                    mFreqAverageRight.setData(halfMagnitude, false);
+                    break;
+            }
+        }
+    }
+
+    public void onMarkerReached(AudioRecord track) {
+    }
+
+    // ---------------------------------------------------------
+    // Implementation of Runnable for the audio recording thread
+    // --------------------
+    public void run() {
+        int nSamplesRead = 0;
+
+        Thread thisThread = Thread.currentThread();
+        while (mRecordThread == thisThread && !mRecordThreadShutdown) {
+            // read from native recorder
+            nSamplesRead = mRecorder.read(mAudioShortArray, 0, mMinRecordBufferSizeInSamples);
+            if (nSamplesRead > 0) {
+                mPipe.write(mAudioShortArray, 0, nSamplesRead);
+            }
+        }
+    }
+
+    private void testUSB() {
+        boolean isConnected = UsbMicrophoneTester.getIsMicrophoneConnected(getApplicationContext());
+        mUsbDevicesInfo = UsbMicrophoneTester.getUSBDeviceListString(getApplicationContext());
+
+        if (isConnected) {
+            mUsbStatusText.setText(
+                    getResources().getText(R.string.audio_frequency_speaker_mic_ready_text));
+            enableLayout(true);
+        } else {
+            mUsbStatusText.setText(
+                    getResources().getText(R.string.audio_frequency_speaker_mic_not_ready_text));
+            enableLayout(false);
+        }
+    }
+
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioLoopbackActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioLoopbackActivity.java
new file mode 100644
index 0000000..e603a69
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioLoopbackActivity.java
@@ -0,0 +1,307 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+import com.android.compatibility.common.util.ReportLog;
+import com.android.compatibility.common.util.ResultType;
+import com.android.compatibility.common.util.ResultUnit;
+import android.content.Context;
+
+import android.media.AudioDeviceCallback;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Message;
+
+import android.util.Log;
+
+import android.view.View;
+import android.view.View.OnClickListener;
+
+import android.widget.Button;
+import android.widget.TextView;
+import android.widget.SeekBar;
+import android.widget.LinearLayout;
+import android.widget.ProgressBar;
+
+/**
+ * Tests audio device round-trip latency using a loopback plug.
+ */
+public class AudioLoopbackActivity extends PassFailButtons.Activity {
+    private static final String TAG = "AudioLoopbackActivity";
+
+    public static final int BYTES_PER_FRAME = 2;
+
+    NativeAudioThread nativeAudioThread = null;
+
+    private int mSamplingRate = 44100;
+    private int mMinBufferSizeInFrames = 0;
+    private static final double CONFIDENCE_THRESHOLD = 0.6;
+    private Correlation mCorrelation = new Correlation();
+
+    OnBtnClickListener mBtnClickListener = new OnBtnClickListener();
+    Context mContext;
+
+    Button mLoopbackPlugReady;
+    TextView mAudioLevelText;
+    SeekBar mAudioLevelSeekbar;
+    LinearLayout mLinearLayout;
+    Button mTestButton;
+    TextView mResultText;
+    ProgressBar mProgressBar;
+
+    int mMaxLevel;
+    private class OnBtnClickListener implements OnClickListener {
+        @Override
+        public void onClick(View v) {
+            switch (v.getId()) {
+                case R.id.audio_loopback_plug_ready_btn:
+                    Log.i(TAG, "audio loopback plug ready");
+                    //enable all the other views.
+                    enableLayout(true);
+                    break;
+                case R.id.audio_loopback_test_btn:
+                    Log.i(TAG, "audio loopback test");
+                    startAudioTest();
+                    break;
+
+            }
+        }
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.audio_loopback_activity);
+
+        mContext = this;
+
+        mLoopbackPlugReady = (Button)findViewById(R.id.audio_loopback_plug_ready_btn);
+        mLoopbackPlugReady.setOnClickListener(mBtnClickListener);
+        mLinearLayout = (LinearLayout)findViewById(R.id.audio_loopback_layout);
+        mAudioLevelText = (TextView)findViewById(R.id.audio_loopback_level_text);
+        mAudioLevelSeekbar = (SeekBar)findViewById(R.id.audio_loopback_level_seekbar);
+        mTestButton =(Button)findViewById(R.id.audio_loopback_test_btn);
+        mTestButton.setOnClickListener(mBtnClickListener);
+        mResultText = (TextView)findViewById(R.id.audio_loopback_results_text);
+        mProgressBar = (ProgressBar)findViewById(R.id.audio_loopback_progress_bar);
+        showWait(false);
+
+        enableLayout(false);         //disable all content until the loopback plug is confirmed ready
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        mMaxLevel = am.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
+        mAudioLevelSeekbar.setMax(mMaxLevel);
+        am.setStreamVolume(AudioManager.STREAM_MUSIC, (int)(0.7 * mMaxLevel), 0);
+        refreshLevel();
+
+        mAudioLevelSeekbar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
+            @Override
+            public void onStopTrackingTouch(SeekBar seekBar) {
+            }
+
+            @Override
+            public void onStartTrackingTouch(SeekBar seekBar) {
+            }
+
+            @Override
+            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+
+                AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+                am.setStreamVolume(AudioManager.STREAM_MUSIC,
+                        progress, 0);
+                refreshLevel();
+                Log.i(TAG,"Changed stream volume to: " + progress);
+            }
+        });
+
+        setPassFailButtonClickListeners();
+        getPassButton().setEnabled(false);
+        setInfoResources(R.string.sample_test, R.string.audio_loopback_info, -1);
+    }
+
+    /**
+     * Refreshes the audio level seekbar and text.
+     */
+    private void refreshLevel() {
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+
+        int currentLevel = am.getStreamVolume(AudioManager.STREAM_MUSIC);
+        mAudioLevelSeekbar.setProgress(currentLevel);
+
+        String levelText = String.format("%s: %d/%d",
+                getResources().getString(R.string.audio_loopback_level_text),
+                currentLevel, mMaxLevel);
+        mAudioLevelText.setText(levelText);
+    }
+
+    /**
+     * Enables or disables the test UI elements.
+     */
+    private void enableLayout(boolean enable) {
+        for (int i = 0; i<mLinearLayout.getChildCount(); i++) {
+            View view = mLinearLayout.getChildAt(i);
+            view.setEnabled(enable);
+        }
+    }
+
+    /**
+     * Shows or hides the progress bar.
+     */
+    private void showWait(boolean show) {
+        if (show) {
+            mProgressBar.setVisibility(View.VISIBLE) ;
+        } else {
+            mProgressBar.setVisibility(View.INVISIBLE) ;
+        }
+    }
+
+    /**
+     *  Start the loopback audio test
+     */
+    private void startAudioTest() {
+        getPassButton().setEnabled(false);
+
+        //get system defaults for sampling rate, buffers.
+        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+        String value = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+        mMinBufferSizeInFrames = Integer.parseInt(value);
+
+        int minBufferSizeInBytes = BYTES_PER_FRAME * mMinBufferSizeInFrames;
+
+        mSamplingRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+
+        Log.i(TAG, String.format("startAudioTest sr:%d , buffer:%d frames",
+                mSamplingRate, mMinBufferSizeInFrames));
+
+        nativeAudioThread = new NativeAudioThread();
+        if (nativeAudioThread != null) {
+            nativeAudioThread.setMessageHandler(mMessageHandler);
+            nativeAudioThread.mSessionId = 0;
+            nativeAudioThread.setParams(mSamplingRate,
+                    minBufferSizeInBytes,
+                    minBufferSizeInBytes,
+                    0x03 /*voice recognition*/);
+            nativeAudioThread.start();
+
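+            // Give the native audio thread a brief moment to spin up before running the test.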
+            try {
+                Thread.sleep(200);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+
+            nativeAudioThread.runTest();
+
+        }
+    }
+
+    /**
+     * Handler for messages from the native audio thread.
+     */
+    private Handler mMessageHandler = new Handler() {
+        public void handleMessage(Message msg) {
+            super.handleMessage(msg);
+            switch(msg.what) {
+                case NativeAudioThread.NATIVE_AUDIO_THREAD_MESSAGE_REC_STARTED:
+                    Log.v(TAG,"got message native rec started!!");
+                    showWait(true);
+                    mResultText.setText("Test Running...");
+                    break;
+                case NativeAudioThread.NATIVE_AUDIO_THREAD_MESSAGE_REC_ERROR:
+                    Log.v(TAG,"got message native rec can't start!!");
+                    showWait(false);
+                    mResultText.setText("Test Error.");
+                    break;
+                case NativeAudioThread.NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE:
+                case NativeAudioThread.NATIVE_AUDIO_THREAD_MESSAGE_REC_COMPLETE_ERRORS:
+                    if (nativeAudioThread != null) {
+                        Log.v(TAG,"Finished recording.");
+                        double [] waveData = nativeAudioThread.getWaveData();
+                        mCorrelation.computeCorrelation(waveData, mSamplingRate);
+                        mResultText.setText(String.format(
+                                "Test Finished\nLatency:%.2f ms\nConfidence: %.2f",
+                                mCorrelation.mEstimatedLatencyMs,
+                                mCorrelation.mEstimatedLatencyConfidence));
+
+                        recordTestResults();
+                        if (mCorrelation.mEstimatedLatencyConfidence >= CONFIDENCE_THRESHOLD) {
+                            getPassButton().setEnabled(true);
+                        }
+
+                        //close
+                        if (nativeAudioThread != null) {
+                            nativeAudioThread.isRunning = false;
+                            try {
+                                nativeAudioThread.finish();
+                                nativeAudioThread.join();
+                            } catch (InterruptedException e) {
+                                e.printStackTrace();
+                            }
+                            nativeAudioThread = null;
+                        }
+                        showWait(false);
+                    }
+                    break;
+                default:
+                    break;
+            }
+        }
+    };
+
+    /**
+     * Stores the test results in the report log.
+     */
+    private void recordTestResults() {
+
+        getReportLog().addValue(
+                "Estimated Latency",
+                mCorrelation.mEstimatedLatencyMs,
+                ResultType.LOWER_BETTER,
+                ResultUnit.MS);
+
+        getReportLog().addValue(
+                "Confidence",
+                mCorrelation.mEstimatedLatencyConfidence,
+                ResultType.HIGHER_BETTER,
+                ResultUnit.NONE);
+
+        int audioLevel = mAudioLevelSeekbar.getProgress();
+        getReportLog().addValue(
+                "Audio Level",
+                audioLevel,
+                ResultType.NEUTRAL,
+                ResultUnit.NONE);
+
+        getReportLog().addValue(
+                "Frames Buffer Size",
+                mMinBufferSizeInFrames,
+                ResultType.NEUTRAL,
+                ResultUnit.NONE);
+
+        getReportLog().addValue(
+                "Sampling Rate",
+                mSamplingRate,
+                ResultType.NEUTRAL,
+                ResultUnit.NONE);
+
+        Log.v(TAG,"Results Recorded");
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java
new file mode 100644
index 0000000..80dd250
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRecordHelper.java
@@ -0,0 +1,144 @@
+package com.android.cts.verifier.audio;
+
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.media.MediaRecorder;
+import android.util.Log;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+
+/**
+ * A wrapper around the AudioRecord class.
+ */
+public class AudioRecordHelper {
+
+  private static final int[] SOURCE = {
+      MediaRecorder.AudioSource.MIC, MediaRecorder.AudioSource.VOICE_RECOGNITION};
+  private static final int[] SAMPLE_RATES_HZ = {
+    AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC), 48000, 44100};
+
+  private static final int CHANNEL = AudioFormat.CHANNEL_CONFIGURATION_MONO;
+  private static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT;
+  private static final String TAG = "AudioRecordHelper";
+  private static AudioRecordHelper instance;
+  private final int bufferSize;
+  private final int sampleRate;
+  private final int source;
+  private ByteArrayOutputStream os;
+  private AudioRecord audioRecord;
+  private volatile boolean isRecording = false;
+
+  private AudioRecordHelper() {
+    int tmpBufferSize = 0;
+    int tmpSampleRate = 0;
+    int tmpSource = 0;
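+    // Probe source / sample-rate combinations until one yields an initialized AudioRecord.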
+    initialization:
+    for (int source : SOURCE) {
+      for (int rate : SAMPLE_RATES_HZ) {
+        tmpBufferSize = AudioRecord.getMinBufferSize(rate, CHANNEL, ENCODING);
+        AudioRecord testAudioRecord = new AudioRecord(source, rate, CHANNEL, ENCODING,
+            tmpBufferSize);
+        boolean initialized = testAudioRecord.getState() == AudioRecord.STATE_INITIALIZED;
+        // Release the probe AudioRecord whether or not it initialized successfully.
+        testAudioRecord.release();
+        if (initialized) {
+          tmpSampleRate = rate;
+          tmpSource = source;
+          break initialization;
+        }
+      }
+    }
+    if (tmpBufferSize == 0 || tmpSampleRate == 0) {
+      Log.e(TAG, "Failed to initialize");
+    }
+    bufferSize = tmpBufferSize;
+    sampleRate = tmpSampleRate;
+    source = tmpSource;
+    Log.d(TAG, "Sample rate = " + sampleRate + "Hz, Source = "
+        + source + " (VOICE_RECOGNITION = 6 , MIC = 1)");
+  }
+
+  public static AudioRecordHelper getInstance() {
+    if (instance == null) {
+      instance = new AudioRecordHelper();
+    }
+    return instance;
+  }
+
+  /**
+   * Start recording.
+   */
+  public void start() {
+    if (!isRecording) {
+      isRecording = true;
+      os = new ByteArrayOutputStream();
+      audioRecord = new AudioRecord(source, sampleRate, CHANNEL, ENCODING, bufferSize);
+      audioRecord.startRecording();
+      startPullingData();
+    }
+  }
+
+  /**
+   * Stop recording
+   */
+  public void stop() {
+    if (isRecording) {
+      isRecording = false;
+      audioRecord.stop();
+      audioRecord.release();
+      audioRecord = null;
+      try {
+        os.close();
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
+    }
+  }
+
+  private void startPullingData() {
+    new Thread() {
+      @Override
+      public void run(){
+        byte data[] = new byte[bufferSize];
+        while (isRecording) {
+          int read = audioRecord.read(data, 0, bufferSize);
+          if (read > 0) {
+            os.write(data, 0, read);
+          }
+          if (read < 0) {
+            break;
+          }
+        }
+      }
+    }.start();
+  }
+
+  /**
+   * Returns the sample rate for this recorder.
+   */
+  public int getSampleRate() {
+    return sampleRate;
+  }
+
+  /**
+   * Returns the audio source currently being used.
+   */
+  public int getAudioSource() {
+    return source;
+  }
+
+  /**
+   * Returns true if the recorder is recording; false otherwise.
+   */
+  public boolean isRecording() {
+    return isRecording;
+  }
+
+  /**
+   * Returns the raw recorded PCM data as a byte array.
+   */
+  public byte[] getByte() {
+    return os.toByteArray();
+  }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRoutingNotificationsActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRoutingNotificationsActivity.java
new file mode 100644
index 0000000..b6a4255
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioRoutingNotificationsActivity.java
@@ -0,0 +1,146 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+
+import android.content.Context;
+
+import android.media.AudioDeviceCallback;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+
+import android.os.Bundle;
+import android.os.Handler;
+
+import android.util.Log;
+
+import android.view.View;
+import android.view.View.OnClickListener;
+
+import android.widget.Button;
+import android.widget.TextView;
+
+/**
+ * Tests AudioTrack and AudioRecord (re)Routing messages.
+ */
+public class AudioRoutingNotificationsActivity extends PassFailButtons.Activity {
+    private static final String TAG = "AudioRoutingNotificationsActivity";
+
+    Context mContext;
+
+    OnBtnClickListener mBtnClickListener = new OnBtnClickListener();
+
+    int mNumTrackNotifications = 0;
+    int mNumRecordNotifications = 0;
+
+    TrivialPlayer mAudioPlayer = new TrivialPlayer();
+    TrivialRecorder mAudioRecorder = new TrivialRecorder();
+
+    private class OnBtnClickListener implements OnClickListener {
+        @Override
+        public void onClick(View v) {
+            switch (v.getId()) {
+                case R.id.audio_routingnotification_playBtn:
+                    Log.i(TAG, "audio_routingnotification_playBtn");
+                    mAudioPlayer.start();
+                    break;
+
+                case R.id.audio_routingnotification_playStopBtn:
+                    Log.i(TAG, "audio_routingnotification_playStopBtn");
+                    mAudioPlayer.stop();
+                    break;
+
+                case R.id.audio_routingnotification_recordBtn:
+                    break;
+
+                case R.id.audio_routingnotification_recordStopBtn:
+                    break;
+            }
+        }
+    }
+
+    private class AudioTrackRoutingChangeListener implements AudioTrack.OnRoutingChangedListener {
+        public void onRoutingChanged(AudioTrack audioTrack) {
+            mNumTrackNotifications++;
+            TextView textView =
+                (TextView)findViewById(R.id.audio_routingnotification_audioTrack_change);
+            String msg = mContext.getResources().getString(
+                    R.string.audio_routingnotification_trackRoutingMsg);
+            AudioDeviceInfo routedDevice = audioTrack.getRoutedDevice();
+            CharSequence deviceName = routedDevice != null ? routedDevice.getProductName() : "none";
+            int deviceType = routedDevice != null ? routedDevice.getType() : -1;
+            textView.setText(msg + " - " +
+                             deviceName + " [0x" + Integer.toHexString(deviceType) + "]" +
+                             " - " + mNumTrackNotifications);
+        }
+    }
+
+    private class AudioRecordRoutingChangeListener implements AudioRecord.OnRoutingChangedListener {
+        public void onRoutingChanged(AudioRecord audioRecord) {
+            mNumRecordNotifications++;
+            TextView textView =
+                    (TextView)findViewById(R.id.audio_routingnotification_audioRecord_change);
+            String msg = mContext.getResources().getString(
+                    R.string.audio_routingnotification_recordRoutingMsg);
+            AudioDeviceInfo routedDevice = audioRecord.getRoutedDevice();
+            CharSequence deviceName = routedDevice != null ? routedDevice.getProductName() : "none";
+            int deviceType = routedDevice != null ? routedDevice.getType() : -1;
+            textView.setText(msg + " - " +
+                             deviceName + " [0x" + Integer.toHexString(deviceType) + "]" +
+                             " - " + mNumRecordNotifications);
+        }
+    }
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.audio_routingnotifications_test);
+
+        Button btn;
+        btn = (Button)findViewById(R.id.audio_routingnotification_playBtn);
+        btn.setOnClickListener(mBtnClickListener);
+        btn = (Button)findViewById(R.id.audio_routingnotification_playStopBtn);
+        btn.setOnClickListener(mBtnClickListener);
+        btn = (Button)findViewById(R.id.audio_routingnotification_recordBtn);
+        btn.setOnClickListener(mBtnClickListener);
+        btn = (Button)findViewById(R.id.audio_routingnotification_recordStopBtn);
+        btn.setOnClickListener(mBtnClickListener);
+
+        mContext = this;
+
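+        // Register routing-change listeners on the player's AudioTrack and the recorder's
+        // AudioRecord; each callback updates its corresponding status TextView.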
+        AudioTrack audioTrack = mAudioPlayer.getAudioTrack();
+        audioTrack.addOnRoutingChangedListener(
+            new AudioTrackRoutingChangeListener(), new Handler());
+
+        AudioRecord audioRecord = mAudioRecorder.getAudioRecord();
+        audioRecord.addOnRoutingChangedListener(
+            new AudioRecordRoutingChangeListener(), new Handler());
+
+        setPassFailButtonClickListeners();
+    }
+
+    @Override
+    public void onBackPressed () {
+        mAudioPlayer.shutDown();
+        mAudioRecorder.shutDown();
+        super.onBackPressed();
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java
new file mode 100644
index 0000000..df7460a
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/Common.java
@@ -0,0 +1,142 @@
+package com.android.cts.verifier.audio;
+
+import android.media.AudioManager;
+import android.media.AudioTrack;
+
+import java.util.ArrayList;
+import java.util.Random;
+
+/**
+ * This class stores common constants and methods.
+ */
+public class Common {
+
+  public static final int RECORDING_SAMPLE_RATE_HZ
+      = AudioRecordHelper.getInstance().getSampleRate();
+  public static final int PLAYING_SAMPLE_RATE_HZ
+      = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
+
+  // Default constants.
+  public static final double PASSING_THRESHOLD_DB = -40.0;
+  public static final double PIP_DURATION_S = 0.004;
+  public static final double PAUSE_DURATION_S = 0.016;
+  public static final int PREFIX_NUM_CHIPS = 1023;
+  public static final int PREFIX_SAMPLES_PER_CHIP = 4;
+  public static final double PREFIX_LENGTH_S = 0.1;
+  public static final double PAUSE_BEFORE_PREFIX_DURATION_S = 0.5;
+  public static final double PAUSE_AFTER_PREFIX_DURATION_S = 0.4;
+  public static final double MIN_FREQUENCY_HZ = 500;
+  public static final double MAX_FREQUENCY_HZ = 21000;
+  public static final double FREQUENCY_STEP_HZ = 100;
+  public static final int SIGNAL_MIN_STRENGTH_DB_ABOVE_NOISE = 10;
+  public static final int REPETITIONS = 5;
+  public static final int NOISE_SAMPLES = 3;
+
+  public static final double[] FREQUENCIES_ORIGINAL = originalFrequencies();
+  public static final int PIP_NUM = FREQUENCIES_ORIGINAL.length;
+  public static final int[] ORDER = order();
+  public static final double[] FREQUENCIES = frequencies();
+
+  public static final double[] WINDOW_FOR_RECORDER =
+      hann(Util.toLength(PIP_DURATION_S, RECORDING_SAMPLE_RATE_HZ));
+  public static final double[] WINDOW_FOR_PLAYER =
+      hann(Util.toLength(PIP_DURATION_S, PLAYING_SAMPLE_RATE_HZ));
+
+  public static final double[] PREFIX_FOR_RECORDER = prefix(RECORDING_SAMPLE_RATE_HZ);
+  public static final double[] PREFIX_FOR_PLAYER = prefix(PLAYING_SAMPLE_RATE_HZ);
+
+  /**
+   * Get a Hann window.
+   */
+  private static double[] hann(int windowWidth) {
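+    // w[i] = 0.5 * (1 - cos(2 * pi * i / windowWidth)), the periodic form of the Hann window.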
+    double[] envelopeArray = new double[windowWidth];
+    for (int i = 0; i < windowWidth; i++) {
+      envelopeArray[i] = 0.5
+          * (1 - Math.cos(2 * Math.PI * i / windowWidth));
+    }
+    return envelopeArray;
+  }
+
+  /**
+   * Get a maximum length sequence, used as prefix to indicate start of signal.
+   */
+  private static double[] prefix(int rate) {
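+    // Build a pseudo-random +/-1 chip sequence with a shift-register style recurrence;
+    // multiplying +/-1 values plays the role of XOR in a conventional m-sequence generator.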
+    double[] codeSequence = new double[PREFIX_NUM_CHIPS];
+    for (int i = 0; i < PREFIX_NUM_CHIPS; i++) {
+      if (i < 10) {
+        codeSequence[i] = 1;
+      } else {
+        codeSequence[i] = -codeSequence[i - 6] * codeSequence[i - 7]
+            * codeSequence[i - 9] * codeSequence[i - 10];
+      }
+    }
+    double[] prefixArray = new double[PREFIX_NUM_CHIPS * PREFIX_SAMPLES_PER_CHIP];
+    int offset = 0;
+    for (int i = 0; i < PREFIX_NUM_CHIPS; i++) {
+      double value = codeSequence[i];
+      for (int j = 0; j < PREFIX_SAMPLES_PER_CHIP; j++) {
+        prefixArray[offset + j] = value;
+      }
+      offset += PREFIX_SAMPLES_PER_CHIP;
+    }
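+    // Resample the chip sequence to PREFIX_LENGTH_S seconds at the requested rate.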
+    int prefixLength = (int) Math.round(PREFIX_LENGTH_S * rate);
+    double[] samplePrefixArray = new double[prefixLength];
+    for (int i = 0; i < prefixLength; i++) {
+      double index = (double) i / prefixLength * (prefixArray.length - 1);
+      int lower = (int) Math.floor(index);
+      int upper = (int) Math.ceil(index);
+      double upperWeight = index - lower;
+      // Linear interpolation; when index lands exactly on a chip sample, that value is used once.
+      samplePrefixArray[i] = (1 - upperWeight) * prefixArray[lower]
+          + upperWeight * prefixArray[upper];
+    }
+    return samplePrefixArray;
+  }
+
+  /**
+   * Returns an array containing the frequencies of the test pips, in the order in which
+   * they will be used in the test.
+   */
+  private static double[] frequencies() {
+    double[] originalFrequencies = originalFrequencies();
+
+    double[] randomFrequencies = new double[Common.REPETITIONS * originalFrequencies.length];
+    for (int i = 0; i < REPETITIONS * originalFrequencies.length; i++) {
+      randomFrequencies[i] = originalFrequencies[ORDER[i] % originalFrequencies.length];
+    }
+
+    return randomFrequencies;
+  }
+
+  /**
+   * Returns an array containing the base frequencies of the test pips.
+   */
+  private static double[] originalFrequencies() {
+    ArrayList<Double> frequencies = new ArrayList<Double>();
+    double frequency = Common.MIN_FREQUENCY_HZ;
+    while (frequency <= Common.MAX_FREQUENCY_HZ) {
+      frequencies.add(new Double(frequency));
+      if ((frequency >= 18500) && (frequency < 20000)) {
+        frequency += Common.FREQUENCY_STEP_HZ;
+      } else {
+        frequency += Common.FREQUENCY_STEP_HZ * 10;
+      }
+    }
+    Double[] frequenciesArray = frequencies.toArray(new Double[frequencies.size()]);
+    double[] frequenciesPrimitiveArray = new double[frequenciesArray.length];
+    for (int i = 0; i < frequenciesArray.length; i++) {
+      frequenciesPrimitiveArray[i] = frequenciesArray[i];
+    }
+    return frequenciesPrimitiveArray;
+  }
+
+  /**
+   * Returns a random permutation of pip indices, generated with an inside-out
+   * Fisher-Yates shuffle using a fixed seed so the order is reproducible.
+   */
+  private static int[] order() {
+    int[] order = new int[REPETITIONS * PIP_NUM];
+    long seed = 0;
+    Random generator = new Random(seed);
+    for (int i = 0; i < REPETITIONS * PIP_NUM; i++) {
+      int j = generator.nextInt(i + 1);
+      order[i] = order[j];
+      order[j] = i;
+    }
+    return order;
+  }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/Correlation.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/Correlation.java
new file mode 100644
index 0000000..75b04eb
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/Correlation.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.audio;
+
+import android.util.Log;
+
+
+public class Correlation {
+
+    private int mBlockSize = 4096;
+    private int mSamplingRate = 44100;
+    private double [] mDataDownsampled = new double [mBlockSize];
+    private double [] mDataAutocorrelated = new double[mBlockSize];
+
+    public double mEstimatedLatencySamples = 0;
+    public double mEstimatedLatencyMs = 0;
+    public double mEstimatedLatencyConfidence = 0.0;
+
+    public void init(int blockSize, int samplingRate) {
+        mBlockSize = blockSize;
+        mSamplingRate = samplingRate;
+    }
+
+    public boolean computeCorrelation(double [] data, int samplingRate) {
+        boolean status = false;
+        log("Started Auto Correlation for data with " + data.length + " points");
+        mSamplingRate = samplingRate;
+
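+        // Reduce the capture to mBlockSize points, autocorrelate, and take the strongest
+        // peak past a minimum lag as the loopback delay.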
+        downsampleData(data, mDataDownsampled);
+
+        //correlation vector
+        autocorrelation(mDataDownsampled, mDataAutocorrelated);
+
+        int N = data.length; //all samples available
+        double groupSize =  (double) N / mBlockSize;  //samples per downsample point.
+
+        double maxValue = 0;
+        int maxIndex = -1;
+
+        double minLatencyMs = 8; //min latency expected. This algorithm should be improved.
+        int minIndex = (int)(0.5 + minLatencyMs * mSamplingRate / (groupSize*1000));
+
+        double average = 0;
+        double rms = 0;
+        //find max
+        for (int i=minIndex; i<mDataAutocorrelated.length; i++) {
+            average += mDataAutocorrelated[i];
+            rms += mDataAutocorrelated[i]*mDataAutocorrelated[i];
+            if (mDataAutocorrelated[i] > maxValue) {
+                maxValue = mDataAutocorrelated[i];
+                maxIndex = i;
+            }
+        }
+
+        rms = Math.sqrt(rms/mDataAutocorrelated.length);
+        average = average/mDataAutocorrelated.length;
+        log(String.format(" Maxvalue %f, max Index : %d/%d (%d)  minIndex=%d",maxValue, maxIndex,
+                mDataAutocorrelated.length, data.length, minIndex));
+
+        log(String.format("  average : %.3f  rms: %.3f", average, rms));
+
+        mEstimatedLatencyConfidence = 0.0;
+        if (average>0) {
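+            // Heuristic confidence: how far the autocorrelation's RMS rises above its mean,
+            // scaled by the factor below and clipped to [0, 1]; a sharp peak raises the RMS.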
+            double factor = 3.0;
+
+            double raw = (rms-average) /(factor*average);
+            log(String.format("Raw: %.3f",raw));
+            mEstimatedLatencyConfidence = Math.max(Math.min(raw, 1.0),0.0);
+        }
+
+        log(String.format(" ****Confidence: %.2f",mEstimatedLatencyConfidence));
+
+        mEstimatedLatencySamples = maxIndex*groupSize;
+
+        mEstimatedLatencyMs = mEstimatedLatencySamples *1000/mSamplingRate;
+
+        log(String.format(" latencySamples: %.2f  %.2f ms", mEstimatedLatencySamples,
<