[automerger skipped] Merge "[RESTRICT AUTOMERGE]: STS include missing apps from EphemeralTest" into oc-dev am: c5f5cb66f7 -s ours am: 7cc3623902 -s ours am: 1b2f853d5a -s ours

am skip reason: subject contains skip directive

Original change: https://googleplex-android-review.googlesource.com/c/platform/cts/+/13121463

MUST ONLY BE SUBMITTED BY AUTOMERGER

Change-Id: I96e2435023276a56182e1ce4f595c7b90e903406
diff --git a/.gitignore b/.gitignore
index 88eb857..baf394f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,7 @@
 *.pyc
 *.*~
 *.py~
-.project
 .cproject
-.classpath
 /bin
 .idea/*
 .idea/
diff --git a/Android.mk b/Android.mk
index 1894043..311b4eb 100644
--- a/Android.mk
+++ b/Android.mk
@@ -13,7 +13,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 include cts/CtsCoverage.mk
 include $(call all-subdir-makefiles)
-
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 71ada3d..e42e0d9 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -46,6 +46,10 @@
 
 $(call add-clean-step, rm -rf $(HOST_OUT_INTERMEDIATES)/EXECUTABLES/vm-tests-tf_intermediates)
 $(call add-clean-step, rm -rf $(OUT_DIR)/host/common/obj/JAVA_LIBRARIES/cts-tradefed_intermediates/com/android/compatibility/SuiteInfo.java)
+$(call add-clean-step, rm -rf $(HOST_OUT)/cts/android-cts/testcases/CtsUiHostTestCases*)
+$(call add-clean-step, rm -rf $(HOST_OUT)/cts_instant/android-cts_instant/testcases/CtsJobSchedulerTestCases*)
+$(call add-clean-step, rm -rf $(HOST_OUT)/cts_instant/android-cts_instant/testcases/CtsUiHostTestCases*)
+$(call add-clean-step, rm -rf $(HOST_OUT)/cts_instant/android-cts_instant/testcases/CtsDpiTestCases2*)
 
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
diff --git a/CtsCoverage.mk b/CtsCoverage.mk
index 895af4d..8a0eef8 100644
--- a/CtsCoverage.mk
+++ b/CtsCoverage.mk
@@ -94,13 +94,14 @@
 .PHONY: cts-combined-xml-coverage
 cts-combined-xml-coverage : $(cts-combined-xml-coverage-report)
 
-# Put the test coverage report in the dist dir if "cts" is among the build goals.
-ifneq ($(filter cts, $(MAKECMDGOALS)),)
-  $(call dist-for-goals, cts, $(cts-test-coverage-report):cts-test-coverage-report.html)
-  $(call dist-for-goals, cts, $(cts-verifier-coverage-report):cts-verifier-coverage-report.html)
-  $(call dist-for-goals, cts, $(cts-combined-coverage-report):cts-combined-coverage-report.html)
-  $(call dist-for-goals, cts, $(cts-combined-xml-coverage-report):cts-combined-coverage-report.xml)
-endif
+.PHONY: cts-api-coverage
+cts-coverage-report-all: cts-test-coverage cts-verifier-coverage cts-combined-coverage cts-combined-xml-coverage
+
+# Put the test coverage report in the dist dir if "cts-api-coverage" is among the build goals.
+$(call dist-for-goals, cts-api-coverage, $(cts-test-coverage-report):cts-test-coverage-report.html)
+$(call dist-for-goals, cts-api-coverage, $(cts-verifier-coverage-report):cts-verifier-coverage-report.html)
+$(call dist-for-goals, cts-api-coverage, $(cts-combined-coverage-report):cts-combined-coverage-report.html)
+$(call dist-for-goals, cts-api-coverage, $(cts-combined-xml-coverage-report):cts-combined-coverage-report.xml)
 
 # Arguments;
 #  1 - Name of the report printed out on the screen
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index cc2aad9..5f3e99f 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -6,6 +6,8 @@
                       tests/tests/animation/
                       tests/tests/print/
                       tests/tests/text/
+                      tests/tests/graphics/
                       tests/tests/transition/
+                      tests/tests/uirendering/
                       tests/tests/view/
                       tests/tests/widget/
diff --git a/apps/CameraITS/build/envsetup.sh b/apps/CameraITS/build/envsetup.sh
index 13c907c..a21108e 100644
--- a/apps/CameraITS/build/envsetup.sh
+++ b/apps/CameraITS/build/envsetup.sh
@@ -31,12 +31,19 @@
 python -V 2>&1 | grep -q "Python 2.7" || \
     echo ">> Require python 2.7" >&2
 
-for M in numpy PIL Image matplotlib pylab scipy.stats scipy.spatial
+for M in numpy PIL matplotlib scipy.stats scipy.spatial
 do
     python -c "import $M" >/dev/null 2>&1 || \
         echo ">> Require Python $M module" >&2
 done
 
+for N in 'PIL Image' 'matplotlib pylab'
+do
+    IFS=' ' read module submodule <<< "$N"
+    python -c "from $module import $submodule" >/dev/null 2>&1 || \
+        echo ">> Require Python $module module $submodule submodule" >&2
+done
+
 CV2_VER=$(python -c "\
 try:
     import cv2
diff --git a/apps/CameraITS/pymodules/its/caps.py b/apps/CameraITS/pymodules/its/caps.py
index e6f096f..d75532b 100644
--- a/apps/CameraITS/pymodules/its/caps.py
+++ b/apps/CameraITS/pymodules/its/caps.py
@@ -12,9 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-import its.objects
 import sys
+import unittest
+
+import its.objects
 
 
 def skip_unless(cond):
@@ -97,8 +98,8 @@
     return props.has_key("android.info.supportedHardwareLevel") and \
            props["android.info.supportedHardwareLevel"] == 2
 
-def radial_distortion_correction(props):
-    """Returns whether a device supports RADIAL_DISTORTION_CORRECTION
+def distortion_correction(props):
+    """Returns whether a device supports DISTORTION_CORRECTION
     capabilities.
 
     Args:
@@ -107,8 +108,8 @@
     Returns:
         Boolean.
     """
-    return props.has_key("android.lens.radialDistortion") and \
-           props["android.lens.radialDistortion"] is not None
+    return props.has_key("android.lens.distortion") and \
+           props["android.lens.distortion"] is not None
 
 def manual_sensor(props):
     """Returns whether a device supports MANUAL_SENSOR capabilities.
@@ -446,6 +447,59 @@
     return props.has_key("android.lens.info.minimumFocusDistance") and \
         props["android.lens.info.minimumFocusDistance"] == 0
 
+def logical_multi_camera(props):
+    """Returns whether a device is a logical multi-camera.
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        Boolean.
+    """
+    return props.has_key("android.request.availableCapabilities") and \
+           11 in props["android.request.availableCapabilities"]
+
+def logical_multi_camera_physical_ids(props):
+    """Returns a logical multi-camera's underlying physical cameras.
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        list of physical cameras backing the logical multi-camera.
+    """
+    physicalIdsList = []
+    if logical_multi_camera(props):
+        physicalIdsList = props['camera.characteristics.physicalCamIds'];
+    return physicalIdsList
+
+def mono_camera(props):
+    """Returns whether a device is monochromatic.
+
+    Args:
+        props: Camera properties object.
+
+    Return:
+        Boolean.
+    """
+    return props.has_key("android.request.availableCapabilities") and \
+           12 in props["android.request.availableCapabilities"]
+
+
+def face_detect(props):
+    """Returns whether a device has face detection mode.
+
+    props['android.statistics.info.availableFaceDetectModes'] != [0] indicates face detection support.
+
+    Args:
+        props: Camera properties objects.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key("android.statistics.info.availableFaceDetectModes") and \
+        props["android.statistics.info.availableFaceDetectModes"] != [0]
+
 
 def debug_mode():
     """Returns True/False for whether test is run in debug mode.
@@ -459,6 +513,19 @@
     return False
 
 
+def backward_compatible(props):
+    """Returns whether a device supports BACKWARD_COMPATIBLE.
+
+    Args:
+        props: Camera properties object.
+
+    Returns:
+        Boolean.
+    """
+    return props.has_key("android.request.availableCapabilities") and \
+              0 in props["android.request.availableCapabilities"]
+
+
 class __UnitTest(unittest.TestCase):
     """Run a suite of unit tests on this module.
     """
@@ -466,4 +533,3 @@
 
 if __name__ == '__main__':
     unittest.main()
-
diff --git a/apps/CameraITS/pymodules/its/cv2image.py b/apps/CameraITS/pymodules/its/cv2image.py
index 83e654e..2004846 100644
--- a/apps/CameraITS/pymodules/its/cv2image.py
+++ b/apps/CameraITS/pymodules/its/cv2image.py
@@ -12,27 +12,38 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import matplotlib
-matplotlib.use('Agg')
-
-import its.error
-from matplotlib import pylab
-import sys
-from PIL import Image
-import numpy
-import math
-import unittest
-import cStringIO
-import scipy.stats
-import copy
-import cv2
 import os
+import unittest
+
+import cv2
+import its.caps
+import its.device
+import its.error
+import its.image
+import numpy
+
+VGA_HEIGHT = 480
+VGA_WIDTH = 640
+
 
 def scale_img(img, scale=1.0):
     """Scale and image based on a real number scale factor."""
     dim = (int(img.shape[1]*scale), int(img.shape[0]*scale))
     return cv2.resize(img.copy(), dim, interpolation=cv2.INTER_AREA)
 
+
+def gray_scale_img(img):
+    """Return gray scale version of image."""
+    if len(img.shape) == 2:
+        img_gray = img.copy()
+    elif len(img.shape) == 3:
+        if img.shape[2] == 1:
+            img_gray = img[:, :, 0].copy()
+        else:
+            img_gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
+    return img_gray
+
+
 class Chart(object):
     """Definition for chart object.
 
@@ -40,7 +51,7 @@
     """
 
     def __init__(self, chart_file, height, distance, scale_start, scale_stop,
-                 scale_step):
+                 scale_step, camera_id=None):
         """Initial constructor for class.
 
         Args:
@@ -50,6 +61,7 @@
             scale_start:    float; start value for scaling for chart search
             scale_stop:     float; stop value for scaling for chart search
             scale_step:     float; step value for scaling for chart search
+            camera_id:      int; camera used for extractor
         """
         self._file = chart_file
         self._height = height
@@ -57,6 +69,23 @@
         self._scale_start = scale_start
         self._scale_stop = scale_stop
         self._scale_step = scale_step
+        self.xnorm, self.ynorm, self.wnorm, self.hnorm, self.scale = its.image.chart_located_per_argv()
+        if not self.xnorm:
+            with its.device.ItsSession(camera_id) as cam:
+                props = cam.get_camera_properties()
+                if its.caps.read_3a(props):
+                    self.locate(cam, props)
+                else:
+                    print 'Chart locator skipped.'
+                    self._set_scale_factors_to_one()
+
+    def _set_scale_factors_to_one(self):
+        """Set scale factors to 1.0 for skipped tests."""
+        self.wnorm = 1.0
+        self.hnorm = 1.0
+        self.xnorm = 0.0
+        self.ynorm = 0.0
+        self.scale = 1.0
 
     def _calc_scale_factors(self, cam, props, fmt, s, e, fd):
         """Take an image with s, e, & fd to find the chart location.
@@ -79,7 +108,7 @@
         req['android.lens.focusDistance'] = fd
         cap_chart = its.image.stationary_lens_cap(cam, req, fmt)
         img_3a = its.image.convert_capture_to_rgb_image(cap_chart, props)
-        img_3a = its.image.flip_mirror_img_per_argv(img_3a)
+        img_3a = its.image.rotate_img_per_argv(img_3a)
         its.image.write_image(img_3a, 'af_scene.jpg')
         template = cv2.imread(self._file, cv2.IMREAD_ANYDEPTH)
         focal_l = cap_chart['metadata']['android.lens.focalLength']
@@ -95,44 +124,44 @@
         print 'Chart/image scale factor = %.2f' % scale_factor
         return template, img_3a, scale_factor
 
-    def locate(self, cam, props, fmt, s, e, fd):
-        """Find the chart in the image.
+    def locate(self, cam, props):
+        """Find the chart in the image, and append location to chart object.
 
-        Args:
-            cam:            An open device session
-            props:          Properties of cam
-            fmt:            Image format for the capture
-            s:              Sensitivity for the AF request as defined in
-                            android.sensor.sensitivity
-            e:              Exposure time for the AF request as defined in
-                            android.sensor.exposureTime
-            fd:             float; autofocus lens position
-
-        Returns:
+        The values appended are:
             xnorm:          float; [0, 1] left loc of chart in scene
             ynorm:          float; [0, 1] top loc of chart in scene
             wnorm:          float; [0, 1] width of chart in scene
             hnorm:          float; [0, 1] height of chart in scene
+            scale:          float; scale factor to extract chart
+
+        Args:
+            cam:            An open device session
+            props:          Camera properties
         """
-        chart, scene, s_factor = self._calc_scale_factors(cam, props, fmt,
-                                                          s, e, fd)
+        if its.caps.read_3a(props):
+            s, e, _, _, fd = cam.do_3a(get_results=True)
+            fmt = {'format': 'yuv', 'width': VGA_WIDTH, 'height': VGA_HEIGHT}
+            chart, scene, s_factor = self._calc_scale_factors(cam, props, fmt,
+                                                              s, e, fd)
+        else:
+            print 'Chart locator skipped.'
+            self._set_scale_factors_to_one()
+            return
         scale_start = self._scale_start * s_factor
         scale_stop = self._scale_stop * s_factor
         scale_step = self._scale_step * s_factor
+        self.scale = s_factor
         max_match = []
         # check for normalized image
         if numpy.amax(scene) <= 1.0:
             scene = (scene * 255.0).astype(numpy.uint8)
-        if len(scene.shape) == 2:
-            scene_gray = scene.copy()
-        elif len(scene.shape) == 3:
-            if scene.shape[2] == 1:
-                scene_gray = scene[:, :, 0]
-            else:
-                scene_gray = cv2.cvtColor(scene.copy(), cv2.COLOR_RGB2GRAY)
+        scene_gray = gray_scale_img(scene)
         print 'Finding chart in scene...'
         for scale in numpy.arange(scale_start, scale_stop, scale_step):
             scene_scaled = scale_img(scene_gray, scale)
+            if (scene_scaled.shape[0] < chart.shape[0] or
+                scene_scaled.shape[1] < chart.shape[1]):
+                continue
             result = cv2.matchTemplate(scene_scaled, chart, cv2.TM_CCOEFF)
             _, opt_val, _, top_left_scaled = cv2.minMaxLoc(result)
             # print out scale and match
@@ -142,26 +171,139 @@
         # determine if optimization results are valid
         opt_values = [x[0] for x in max_match]
         if 2.0*min(opt_values) > max(opt_values):
-            estring = ('Unable to find chart in scene!\n'
+            estring = ('Warning: unable to find chart in scene!\n'
                        'Check camera distance and self-reported '
                        'pixel pitch, focal length and hyperfocal distance.')
-            raise its.error.Error(estring)
-        # find max and draw bbox
-        match_index = max_match.index(max(max_match, key=lambda x: x[0]))
-        scale = scale_start + scale_step * match_index
-        print 'Optimum scale factor: %.3f' %  scale
-        top_left_scaled = max_match[match_index][1]
-        h, w = chart.shape
-        bottom_right_scaled = (top_left_scaled[0] + w, top_left_scaled[1] + h)
-        top_left = (int(top_left_scaled[0]/scale),
-                    int(top_left_scaled[1]/scale))
-        bottom_right = (int(bottom_right_scaled[0]/scale),
-                        int(bottom_right_scaled[1]/scale))
-        wnorm = float((bottom_right[0]) - top_left[0]) / scene.shape[1]
-        hnorm = float((bottom_right[1]) - top_left[1]) / scene.shape[0]
-        xnorm = float(top_left[0]) / scene.shape[1]
-        ynorm = float(top_left[1]) / scene.shape[0]
-        return xnorm, ynorm, wnorm, hnorm
+            print estring
+            self._set_scale_factors_to_one()
+        else:
+            if (max(opt_values) == opt_values[0] or
+                        max(opt_values) == opt_values[len(opt_values)-1]):
+                estring = ('Warning: chart is at extreme range of locator '
+                           'check.\n')
+                print estring
+            # find max and draw bbox
+            match_index = max_match.index(max(max_match, key=lambda x: x[0]))
+            self.scale = scale_start + scale_step * match_index
+            print 'Optimum scale factor: %.3f' %  self.scale
+            top_left_scaled = max_match[match_index][1]
+            h, w = chart.shape
+            bottom_right_scaled = (top_left_scaled[0] + w,
+                                   top_left_scaled[1] + h)
+            top_left = (int(top_left_scaled[0]/self.scale),
+                        int(top_left_scaled[1]/self.scale))
+            bottom_right = (int(bottom_right_scaled[0]/self.scale),
+                            int(bottom_right_scaled[1]/self.scale))
+            self.wnorm = float((bottom_right[0]) - top_left[0]) / scene.shape[1]
+            self.hnorm = float((bottom_right[1]) - top_left[1]) / scene.shape[0]
+            self.xnorm = float(top_left[0]) / scene.shape[1]
+            self.ynorm = float(top_left[1]) / scene.shape[0]
+
+
+def get_angle(input_img):
+    """Computes anglular inclination of chessboard in input_img.
+
+    Angle estimation algorithm description:
+        Input: 2D grayscale image of chessboard.
+        Output: Angle of rotation of chessboard perpendicular to
+            chessboard. Assumes chessboard and camera are parallel to
+            each other.
+
+        1) Use adaptive threshold to make image binary
+        2) Find contours
+        3) Filter out small contours
+        4) Filter out all non-square contours
+        5) Compute most common square shape.
+            The assumption here is that the most common square instances
+            are the chessboard squares. We've shown that with our current
+            tuning, we can robustly identify the squares on the sensor fusion
+            chessboard.
+        6) Return median angle of most common square shape.
+
+    USAGE NOTE: This function has been tuned to work for the chessboard used in
+    the sensor_fusion tests. See images in test_images/rotated_chessboard/ for
+    sample captures. If this function is used with other chessboards, it may not
+    work as expected.
+
+    TODO: Make algorithm more robust so it works on any type of
+    chessboard.
+
+    Args:
+        input_img (2D numpy.ndarray): Grayscale image stored as a 2D
+            numpy array.
+
+    Returns:
+        Median angle of squares in degrees identified in the image.
+    """
+    # Tuning parameters
+    min_square_area = (float)(input_img.shape[1] * 0.05)
+
+    # Creates copy of image to avoid modifying original.
+    img = numpy.array(input_img, copy=True)
+
+    # Scale pixel values from 0-1 to 0-255
+    img *= 255
+    img = img.astype(numpy.uint8)
+
+    thresh = cv2.adaptiveThreshold(
+            img, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 201, 2)
+
+    # Find all contours
+    contours = []
+    cv2_version = cv2.__version__
+    if cv2_version.startswith('2.4.'):
+        contours, _ = cv2.findContours(
+                thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
+    elif cv2_version.startswith('3.2.'):
+        _, contours, _ = cv2.findContours(
+                thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
+
+    # Filter contours to squares only.
+    square_contours = []
+
+    for contour in contours:
+        rect = cv2.minAreaRect(contour)
+        _, (width, height), angle = rect
+
+        # Skip non-squares (with 0.1 tolerance)
+        tolerance = 0.1
+        if width < height * (1 - tolerance) or width > height * (1 + tolerance):
+            continue
+
+        # Remove very small contours.
+        # These are usually just tiny dots due to noise.
+        area = cv2.contourArea(contour)
+        if area < min_square_area:
+            continue
+
+        if cv2_version.startswith('2.4.'):
+            box = numpy.int0(cv2.cv.BoxPoints(rect))
+        elif cv2_version.startswith('3.2.'):
+            box = numpy.int0(cv2.boxPoints(rect))
+        square_contours.append(contour)
+
+    areas = []
+    for contour in square_contours:
+        area = cv2.contourArea(contour)
+        areas.append(area)
+
+    median_area = numpy.median(areas)
+
+    filtered_squares = []
+    filtered_angles = []
+    for square in square_contours:
+        area = cv2.contourArea(square)
+        if area < median_area * 0.90 or area > median_area * 1.10:
+            continue
+
+        filtered_squares.append(square)
+        _, (width, height), angle = cv2.minAreaRect(square)
+        filtered_angles.append(angle)
+
+    if len(filtered_angles) < 10:
+        return None
+
+    return numpy.median(filtered_angles)
 
 
 class __UnitTest(unittest.TestCase):
@@ -186,12 +328,63 @@
             blur = cv2.blur(chart, (j, j))
             blur = blur[:, :, numpy.newaxis]
             sharpness[j] = (yuv_full_scale *
-                    its.image.compute_image_sharpness(blur / white_level))
+                            its.image.compute_image_sharpness(blur /
+                                                              white_level))
         self.assertTrue(numpy.isclose(sharpness[2]/sharpness[4],
                                       numpy.sqrt(2), atol=0.1))
         self.assertTrue(numpy.isclose(sharpness[4]/sharpness[8],
                                       numpy.sqrt(2), atol=0.1))
 
+    def test_get_angle_identify_unrotated_chessboard_angle(self):
+        basedir = os.path.join(
+                os.path.dirname(__file__), 'test_images/rotated_chessboards/')
+
+        normal_img_path = os.path.join(basedir, 'normal.jpg')
+        wide_img_path = os.path.join(basedir, 'wide.jpg')
+
+        normal_img = cv2.cvtColor(
+                cv2.imread(normal_img_path), cv2.COLOR_BGR2GRAY)
+        wide_img = cv2.cvtColor(
+                cv2.imread(wide_img_path), cv2.COLOR_BGR2GRAY)
+
+        assert get_angle(normal_img) == 0
+        assert get_angle(wide_img) == 0
+
+    def test_get_angle_identify_rotated_chessboard_angle(self):
+        basedir = os.path.join(
+                os.path.dirname(__file__), 'test_images/rotated_chessboards/')
+
+        # Array of the image files and angles containing rotated chessboards.
+        test_cases = [
+                ('_15_ccw', 15),
+                ('_30_ccw', 30),
+                ('_45_ccw', 45),
+                ('_60_ccw', 60),
+                ('_75_ccw', 75),
+                ('_90_ccw', 90)
+        ]
+
+        # For each rotated image pair (normal, wide), check that the angle is
+        # identified as expected.
+        for suffix, angle in test_cases:
+            # Define image paths
+            normal_img_path = os.path.join(
+                    basedir, 'normal{}.jpg'.format(suffix))
+            wide_img_path = os.path.join(
+                    basedir, 'wide{}.jpg'.format(suffix))
+
+            # Load and color convert images
+            normal_img = cv2.cvtColor(
+                    cv2.imread(normal_img_path), cv2.COLOR_BGR2GRAY)
+            wide_img = cv2.cvtColor(
+                    cv2.imread(wide_img_path), cv2.COLOR_BGR2GRAY)
+
+            # Assert angle is as expected up to 2.0 degrees of accuracy.
+            assert numpy.isclose(
+                    abs(get_angle(normal_img)), angle, 2.0)
+            assert numpy.isclose(
+                    abs(get_angle(wide_img)), angle, 2.0)
+
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/apps/CameraITS/pymodules/its/device.py b/apps/CameraITS/pymodules/its/device.py
index 49bafc4..a1de8cf 100644
--- a/apps/CameraITS/pymodules/its/device.py
+++ b/apps/CameraITS/pymodules/its/device.py
@@ -25,8 +25,7 @@
 import hashlib
 import numpy
 import string
-
-CMD_DELAY = 1  # seconds
+import unicodedata
 
 
 class ItsSession(object):
@@ -83,6 +82,8 @@
     RESULT_VALUES = {RESULT_PASS, RESULT_FAIL, RESULT_NOT_EXECUTED}
     RESULT_KEY = 'result'
     SUMMARY_KEY = 'summary'
+    START_TIME_KEY = 'start'
+    END_TIME_KEY = 'end'
 
     adb = "adb -d"
     device_id = ""
@@ -118,10 +119,10 @@
             try:
                 socket_lock.bind((ItsSession.IPADDR, ItsSession.LOCK_PORT))
                 break
-            except socket.error:
+            except (socket.error, socket.timeout):
                 if i == NUM_RETRIES - 1:
                     raise its.error.Error(self.device_id,
-                                          "acquiring socket lock timed out")
+                                          "socket lock returns error")
                 else:
                     time.sleep(RETRY_WAIT_TIME_SEC)
 
@@ -203,11 +204,7 @@
 
         # TODO: Figure out why "--user 0" is needed, and fix the problem.
         _run('%s shell am force-stop --user 0 %s' % (self.adb, self.PACKAGE))
-        _run(('%s shell am start --user 0 '
-              'com.android.cts.verifier/.camera.its.ItsTestActivity '
-              '--activity-brought-to-front') % self.adb)
-        time.sleep(CMD_DELAY)
-        _run(('%s shell am startservice --user 0 -t text/plain '
+        _run(('%s shell am start-foreground-service --user 0 -t text/plain '
               '-a %s') % (self.adb, self.INTENT_START))
 
         # Wait until the socket is ready to accept a connection.
@@ -221,7 +218,10 @@
                 break
         proc.kill()
 
-    def __init__(self):
+    def __init__(self, camera_id=None):
+        self._camera_id = camera_id
+
+    def __enter__(self):
         # Initialize device id and adb command.
         self.device_id = get_device_id()
         self.adb = "adb -s " + self.device_id
@@ -230,17 +230,13 @@
         self.__init_socket_port()
 
         self.__close_camera()
-        self.__open_camera()
-
-    def __del__(self):
-        if hasattr(self, 'sock') and self.sock:
-            self.__close_camera()
-            self.sock.close()
-
-    def __enter__(self):
+        self.__open_camera(self._camera_id)
         return self
 
     def __exit__(self, type, value, traceback):
+        if hasattr(self, 'sock') and self.sock:
+            self.__close_camera()
+            self.sock.close()
         return False
 
     def __read_response_from_socket(self):
@@ -267,12 +263,17 @@
             buf = numpy.frombuffer(buf, dtype=numpy.uint8)
         return jobj, buf
 
-    def __open_camera(self):
-        # Get the camera ID to open as an argument.
-        camera_id = 0
-        for s in sys.argv[1:]:
-            if s[:7] == "camera=" and len(s) > 7:
-                camera_id = int(s[7:])
+    def __open_camera(self, camera_id):
+        # Get the camera ID to open if it was passed as a single-camera argument.
+        # This allows passing camera=# to individual tests at the command line,
+        # and camera=#,#,# or no camera argv with tools/run_all_tests.py.
+        if not camera_id:
+            camera_id = 0
+            for s in sys.argv[1:]:
+                if s[:7] == "camera=" and len(s) > 7:
+                    camera_ids = s[7:].split(",")
+                    if len(camera_ids) == 1:
+                        camera_id = camera_ids[0]
         cmd = {"cmdName":"open", "cameraId":camera_id}
         self.sock.send(json.dumps(cmd) + "\n")
         data,_ = self.__read_response_from_socket()
@@ -399,13 +400,33 @@
         self.props = data['objValue']['cameraProperties']
         return data['objValue']['cameraProperties']
 
+    def get_camera_properties_by_id(self, camera_id):
+        """Get the camera properties object for device with camera_id
+
+        Args:
+            camera_id: The ID string of the camera
+
+        Returns:
+            The Python dictionary object for the CameraProperties object. Empty
+            if no such device exists.
+
+        """
+        cmd = {}
+        cmd["cmdName"] = "getCameraPropertiesById"
+        cmd["cameraId"] = camera_id
+        self.sock.send(json.dumps(cmd) + "\n")
+        data,_ = self.__read_response_from_socket()
+        if data['tag'] != 'cameraProperties':
+            raise its.error.Error('Invalid command response')
+        return data['objValue']['cameraProperties']
+
     def do_3a(self, regions_ae=[[0,0,1,1,1]],
                     regions_awb=[[0,0,1,1,1]],
                     regions_af=[[0,0,1,1,1]],
                     do_ae=True, do_awb=True, do_af=True,
                     lock_ae=False, lock_awb=False,
                     get_results=False,
-                    ev_comp=0):
+                    ev_comp=0, mono_camera=False):
         """Perform a 3A operation on the device.
 
         Triggers some or all of AE, AWB, and AF, and returns once they have
@@ -425,6 +446,7 @@
             lock_awb: Request AWB lock after convergence, and wait for it.
             get_results: Return the 3A results from this function.
             ev_comp: An EV compensation value to use when running AE.
+            mono_camera: Boolean for monochrome camera.
 
         Region format in args:
             Arguments are lists of weighted regions; each weighted region is a
@@ -485,8 +507,9 @@
                 raise its.error.Error('Invalid command response')
         if converged and not get_results:
             return None,None,None,None,None
-        if (do_ae and ae_sens == None or do_awb and awb_gains == None
+        if (do_ae and ae_sens == None or (not mono_camera and do_awb and awb_gains == None)
                 or do_af and af_dist == None or not converged):
+
             raise its.error.Error('3A failed to converge')
         return ae_sens, ae_exp, awb_gains, awb_transform, af_dist
 
@@ -511,6 +534,17 @@
         "dng", "raw", "raw10", "raw12", or "rawStats". The default is a YUV420
         frame ("yuv") corresponding to a full sensor frame.
 
+        Optionally, the out_surfaces field can specify physical camera id(s) if the
+        current camera device is a logical multi-camera. The physical camera id
+        must refer to a physical camera backing this logical camera device, and
+        only "yuv", "raw", "raw10", and "raw12" support the physical camera id field.
+
+        Currently only 2 physical streams with the same format are supported, one
+        from each physical camera:
+        - yuv physical streams of the same size.
+        - raw physical streams with the same or different sizes, depending on
+          device capability. (Different physical cameras may have different raw sizes).
+
         Note that one or more surfaces can be specified, allowing a capture to
         request images back in multiple formats (e.g.) raw+yuv, raw+jpeg,
         yuv+jpeg, raw+yuv+jpeg. If the size is omitted for a surface, the
@@ -672,14 +706,36 @@
             cmd["outputSurfaces"] = [{"format": "yuv",
                                       "width" : max_yuv_size[0],
                                       "height": max_yuv_size[1]}]
+
+        # Figure out requested physical camera ids, physical and logical
+        # streams.
+        physical_cam_ids = {}
+        physical_buffers = {}
+        physical_cam_format = None
+        logical_cam_formats = []
+        for i,s in enumerate(cmd["outputSurfaces"]):
+            if "format" in s and s["format"] in ["yuv", "raw", "raw10", "raw12"]:
+                if "physicalCamera" in s:
+                    if physical_cam_format is not None and s["format"] != physical_cam_format:
+                        raise its.error.Error('ITS does not support capturing multiple ' +
+                                              'physical formats yet')
+                    physical_cam_ids[i] = s["physicalCamera"]
+                    physical_buffers[s["physicalCamera"]] = []
+                    physical_cam_format = s["format"]
+                else:
+                    logical_cam_formats.append(s["format"])
+            else:
+                logical_cam_formats.append(s["format"])
+
         ncap = len(cmd["captureRequests"])
         nsurf = 1 if out_surfaces is None else len(cmd["outputSurfaces"])
         # Only allow yuv output to multiple targets
-        yuv_surfaces = [s for s in cmd["outputSurfaces"] if s["format"]=="yuv"]
-        n_yuv = len(yuv_surfaces)
+        logical_yuv_surfaces = [s for s in cmd["outputSurfaces"] if s["format"]=="yuv"\
+                        and "physicalCamera" not in s]
+        n_yuv = len(logical_yuv_surfaces)
         # Compute the buffer size of YUV targets
         yuv_maxsize_1d = 0
-        for s in yuv_surfaces:
+        for s in logical_yuv_surfaces:
             if not ("width" in s and "height" in s):
                 if self.props is None:
                     raise its.error.Error('Camera props are unavailable')
@@ -690,14 +746,14 @@
         yuv_sizes = [c["width"]*c["height"]*3/2
                      if "width" in c and "height" in c
                      else yuv_maxsize_1d
-                     for c in yuv_surfaces]
+                     for c in logical_yuv_surfaces]
         # Currently we don't pass enough metadata from ItsService to distinguish
         # different yuv streams of the same buffer size
         if len(yuv_sizes) != len(set(yuv_sizes)):
             raise its.error.Error(
                     'ITS does not support yuv outputs of same buffer size')
-        if len(formats) > len(set(formats)):
-            if n_yuv != len(formats) - len(set(formats)) + 1:
+        if len(logical_cam_formats) > len(set(logical_cam_formats)):
+            if n_yuv != len(logical_cam_formats) - len(set(logical_cam_formats)) + 1:
                 raise its.error.Error('Duplicate format requested')
 
         raw_formats = 0;
@@ -736,6 +792,7 @@
                 "rawStats":[], "dng":[], "jpeg":[]}
         yuv_bufs = {size:[] for size in yuv_sizes}
         mds = []
+        physical_mds = []
         widths = None
         heights = None
         while nbufs < ncap*nsurf or len(mds) < ncap:
@@ -752,12 +809,18 @@
                 nbufs += 1
             elif jsonObj['tag'] == 'captureResults':
                 mds.append(jsonObj['objValue']['captureResult'])
+                physical_mds.append(jsonObj['objValue']['physicalResults'])
                 outputs = jsonObj['objValue']['outputs']
                 widths = [out['width'] for out in outputs]
                 heights = [out['height'] for out in outputs]
             else:
-                # Just ignore other tags
-                None
+                tagString = unicodedata.normalize('NFKD', jsonObj['tag']).encode('ascii', 'ignore');
+                for x in ['rawImage', 'raw10Image', 'raw12Image', 'yuvImage']:
+                    if (tagString.startswith(x)):
+                        physicalId = jsonObj['tag'][len(x):];
+                        if physicalId in physical_cam_ids.values():
+                            physical_buffers[physicalId].append(buf)
+                            nbufs += 1
         rets = []
         for j,fmt in enumerate(formats):
             objs = []
@@ -766,8 +829,17 @@
                 obj["width"] = widths[j]
                 obj["height"] = heights[j]
                 obj["format"] = fmt
-                obj["metadata"] = mds[i]
-                if fmt == 'yuv':
+                if j in physical_cam_ids:
+                    for physical_md in physical_mds[i]:
+                        if physical_cam_ids[j] in physical_md:
+                            obj["metadata"] = physical_md[physical_cam_ids[j]]
+                            break
+                else:
+                    obj["metadata"] = mds[i]
+
+                if j in physical_cam_ids:
+                    obj["data"] = physical_buffers[physical_cam_ids[j]][i]
+                elif fmt == 'yuv':
                     buf_size = widths[j] * heights[j] * 3 / 2
                     obj["data"] = yuv_bufs[buf_size][i]
                 else:
@@ -839,11 +911,13 @@
     Returns:
         Nothing.
     """
+    ACTIVITY_START_WAIT = 1.5 # seconds
     adb = "adb -s " + device_id
 
-    # Start ItsTestActivity to prevent flaky
+    # Start ItsTestActivity to receive test results
     cmd = "%s shell am start %s --activity-brought-to-front" % (adb, ItsSession.ITS_TEST_ACTIVITY)
     _run(cmd)
+    time.sleep(ACTIVITY_START_WAIT)
 
     # Validate/process results argument
     for scene in results:
@@ -871,6 +945,20 @@
         print "ITS command string might be too long! len:", len(cmd)
     _run(cmd)
 
+def adb_log(device_id, msg):
+    """Send a log message to adb logcat
+
+    Args:
+        device_id: The ID string of the adb device
+        msg: the message string to be sent to logcat
+
+    Returns:
+        Nothing.
+    """
+    adb = "adb -s " + device_id
+    cmd = "%s shell log -p i -t \"ItsTestHost\" %s" % (adb, msg)
+    _run(cmd)
+
 def get_device_fingerprint(device_id):
     """ Return the Build FingerPrint of the device that the test is running on.
 
diff --git a/apps/CameraITS/pymodules/its/image.py b/apps/CameraITS/pymodules/its/image.py
index c1bc0e2..3ea6fa3 100644
--- a/apps/CameraITS/pymodules/its/image.py
+++ b/apps/CameraITS/pymodules/its/image.py
@@ -151,7 +151,7 @@
     lsbs = img[::, 4::5].reshape(h,w/4)
     lsbs = numpy.right_shift(
             numpy.packbits(numpy.unpackbits(lsbs).reshape(h,w/4,4,2),3), 6)
-    # Pair the LSB bits group to pixel 0 instead of pixel 3
+    # Pair the LSB bits group to 0th pixel instead of 3rd pixel
     lsbs = lsbs.reshape(h,w/4,4)[:,:,::-1]
     lsbs = lsbs.reshape(h,w)
     # Fuse the MSBs and LSBs back together
@@ -267,8 +267,8 @@
                             buffer=cap["data"][0:w*h*2])
         img = img.astype(numpy.float32).reshape(h,w) / white_level
         # Crop the raw image to the active array region.
-        if props.has_key("android.sensor.info.activeArraySize") \
-                and props["android.sensor.info.activeArraySize"] is not None \
+        if props.has_key("android.sensor.info.preCorrectionActiveArraySize") \
+                and props["android.sensor.info.preCorrectionActiveArraySize"] is not None \
                 and props.has_key("android.sensor.info.pixelArraySize") \
                 and props["android.sensor.info.pixelArraySize"] is not None:
             # Note that the Rect class is defined such that the left,top values
@@ -277,10 +277,10 @@
             # computed as right-left, rather than right-left+1, etc.
             wfull = props["android.sensor.info.pixelArraySize"]["width"]
             hfull = props["android.sensor.info.pixelArraySize"]["height"]
-            xcrop = props["android.sensor.info.activeArraySize"]["left"]
-            ycrop = props["android.sensor.info.activeArraySize"]["top"]
-            wcrop = props["android.sensor.info.activeArraySize"]["right"]-xcrop
-            hcrop = props["android.sensor.info.activeArraySize"]["bottom"]-ycrop
+            xcrop = props["android.sensor.info.preCorrectionActiveArraySize"]["left"]
+            ycrop = props["android.sensor.info.preCorrectionActiveArraySize"]["top"]
+            wcrop = props["android.sensor.info.preCorrectionActiveArraySize"]["right"]-xcrop
+            hcrop = props["android.sensor.info.preCorrectionActiveArraySize"]["bottom"]-ycrop
             assert(wfull >= wcrop >= 0)
             assert(hfull >= hcrop >= 0)
             assert(wfull - wcrop >= xcrop >= 0)
@@ -414,7 +414,7 @@
     return img
 
 
-def get_black_level(chan, props, cap_res):
+def get_black_level(chan, props, cap_res=None):
     """Return the black level to use for a given capture.
 
     Uses a dynamic value from the capture result if available, else falls back
@@ -428,7 +428,7 @@
     Returns:
         The black level value for the specified channel.
     """
-    if (cap_res.has_key('android.sensor.dynamicBlackLevel') and
+    if (cap_res is not None and cap_res.has_key('android.sensor.dynamicBlackLevel') and
             cap_res['android.sensor.dynamicBlackLevel'] is not None):
         black_levels = cap_res['android.sensor.dynamicBlackLevel']
     else:
@@ -646,7 +646,10 @@
     ytile = int(math.ceil(ynorm * hfull))
     wtile = int(math.floor(wnorm * wfull))
     htile = int(math.floor(hnorm * hfull))
-    return img[ytile:ytile+htile,xtile:xtile+wtile,:].copy()
+    if len(img.shape)==2:
+        return img[ytile:ytile+htile,xtile:xtile+wtile].copy()
+    else:
+        return img[ytile:ytile+htile,xtile:xtile+wtile,:].copy()
 
 
 def compute_image_means(img):
@@ -697,6 +700,22 @@
     return snr
 
 
+def compute_image_max_gradients(img):
+    """Calculate the maximum gradient of each color channel in the image.
+
+    Args:
+        img: Numpy float image array, with pixel values in [0,1].
+
+    Returns:
+        A list of gradient max values, one per color channel in the image.
+    """
+    grads = []
+    chans = img.shape[2]
+    for i in xrange(chans):
+        grads.append(numpy.amax(numpy.gradient(img[:, :, i])))
+    return grads
+
+
 def write_image(img, fname, apply_gamma=False):
     """Save a float-3 numpy array image to a file.
 
@@ -780,6 +799,7 @@
     [gy, gx] = numpy.gradient(luma)
     return numpy.average(numpy.sqrt(gy*gy + gx*gx))
 
+
 def normalize_img(img):
     """Normalize the image values to between 0 and 1.
 
@@ -790,21 +810,39 @@
     """
     return (img - numpy.amin(img))/(numpy.amax(img) - numpy.amin(img))
 
-def flip_mirror_img_per_argv(img):
-    """Flip/mirror an image if "flip" or "mirror" is in argv
+
+def chart_located_per_argv():
+    """Determine if chart already located outside of test.
+
+    If chart info provided, return location and size. If not, return None.
+
+    Args:
+        None
+    Returns:
+        chart_loc:  float converted xnorm,ynorm,wnorm,hnorm,scale from argv text.
+                    argv is of form 'chart_loc=0.45,0.45,0.1,0.1,1.0'
+    """
+    for s in sys.argv[1:]:
+        if s[:10] == "chart_loc=" and len(s) > 10:
+            chart_loc = s[10:].split(",")
+            return map(float, chart_loc)
+    return None, None, None, None, None
+
+
+def rotate_img_per_argv(img):
+    """Rotate an image 180 degrees if "rotate" is in argv
 
     Args:
         img: 2-D numpy array of image values
     Returns:
-        Flip/mirrored image
+        Rotated image
     """
     img_out = img
-    if "flip" in sys.argv:
-        img_out = numpy.flipud(img_out)
-    if "mirror" in sys.argv:
-        img_out = numpy.fliplr(img_out)
+    if "rotate180" in sys.argv:
+        img_out = numpy.fliplr(numpy.flipud(img_out))
     return img_out
 
+
 def stationary_lens_cap(cam, req, fmt):
     """Take up to NUM_TRYS caps and save the 1st one with lens stationary.
 
@@ -829,6 +867,7 @@
             raise its.error.Error('Cannot settle lens after %d trys!' % trys)
     return cap[NUM_FRAMES-1]
 
+
 class __UnitTest(unittest.TestCase):
     """Run a suite of unit tests on this module.
     """
diff --git a/apps/CameraITS/pymodules/its/objects.py b/apps/CameraITS/pymodules/its/objects.py
index 8122186..a76c7d4 100644
--- a/apps/CameraITS/pymodules/its/objects.py
+++ b/apps/CameraITS/pymodules/its/objects.py
@@ -12,16 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-import os.path
-import sys
-import re
-import json
-import tempfile
-import time
-import unittest
-import subprocess
 import math
+import unittest
+
 
 def int_to_rational(i):
     """Function to convert Python integers to Camera2 rationals.
@@ -115,9 +108,10 @@
         #CONTRAST_CURVE mode
         if 0 in props["android.tonemap.availableToneMapModes"]:
             req["android.tonemap.mode"] = 0
-            req["android.tonemap.curveRed"] = [0.0,0.0, 1.0,1.0]
-            req["android.tonemap.curveGreen"] = [0.0,0.0, 1.0,1.0]
-            req["android.tonemap.curveBlue"] = [0.0,0.0, 1.0,1.0]
+            req["android.tonemap.curve"] = {
+                "red": [0.0,0.0, 1.0,1.0],
+                "green": [0.0,0.0, 1.0,1.0],
+                "blue": [0.0,0.0, 1.0,1.0]}
         #GAMMA_VALUE mode
         elif 3 in props["android.tonemap.availableToneMapModes"]:
             req["android.tonemap.mode"] = 3
@@ -228,12 +222,15 @@
     set_filter_off_or_fast_if_possible(props, req,
         "android.colorCorrection.availableAberrationModes",
         "android.colorCorrection.aberrationMode")
-    if props.has_key("android.request.availableCharacteristicsKeys"):
-        hot_pixel_modes = 393217 in props["android.request.availableCharacteristicsKeys"]
-        edge_modes = 196610 in props["android.request.availableCharacteristicsKeys"]
-    if props.has_key("android.request.availableRequestKeys"):
-        hot_pixel_mode = 393216 in props["android.request.availableRequestKeys"]
-        edge_mode = 196608 in props["android.request.availableRequestKeys"]
+    if props.has_key("camera.characteristics.keys"):
+        chars_keys = props["camera.characteristics.keys"]
+        hot_pixel_modes = \
+                "android.hotPixel.availableHotPixelModes" in chars_keys
+        edge_modes = "android.edge.availableEdgeModes" in chars_keys
+    if props.has_key("camera.characteristics.requestKeys"):
+        req_keys = props["camera.characteristics.requestKeys"]
+        hot_pixel_mode = "android.hotPixel.mode" in req_keys
+        edge_mode = "android.edge.mode" in req_keys
     if hot_pixel_modes and hot_pixel_mode:
         set_filter_off_or_fast_if_possible(props, req,
             "android.hotPixel.availableHotPixelModes",
@@ -244,7 +241,7 @@
             "android.edge.mode")
 
 def get_fastest_manual_capture_settings(props):
-    """Return a capture request and format spec for the fastest capture.
+    """Return a capture request and format spec for the fastest manual capture.
 
     Args:
         props: the object returned from its.device.get_camera_properties().
@@ -265,6 +262,26 @@
 
     return req, out_spec
 
+def get_fastest_auto_capture_settings(props):
+    """Return a capture request and format spec for the fastest auto capture.
+
+    Args:
+        props: the object returned from its.device.get_camera_properties().
+
+    Returns:
+        Two values, the first is a capture request, and the second is an output
+        format specification, for the fastest possible (legal) capture that
+        can be performed on this device (with the smallest output size).
+    """
+    fmt = "yuv"
+    size = get_available_output_sizes(fmt, props)[-1]
+    out_spec = {"format":fmt, "width":size[0], "height":size[1]}
+    req = auto_capture_request()
+
+    turn_slow_filters_off(props, req)
+
+    return req, out_spec
+
 
 def get_smallest_yuv_format(props, match_ar=None):
     """Return a capture request and format spec for the smallest yuv size.
@@ -282,22 +299,39 @@
     return fmt
 
 
-def get_largest_yuv_format(props):
-    """Return a capture request and format spec for the smallest yuv size.
+def get_largest_yuv_format(props, match_ar=None):
+    """Return a capture request and format spec for the largest yuv size.
 
     Args:
         props: the object returned from its.device.get_camera_properties().
 
     Returns:
-        fmt:    an output format specification, for the smallest possible yuv
+        fmt:    an output format specification, for the largest possible yuv
         format for this device.
     """
-    size = get_available_output_sizes("yuv", props)[0]
+    size = get_available_output_sizes("yuv", props, match_ar_size=match_ar)[0]
     fmt = {"format":"yuv", "width":size[0], "height":size[1]}
 
     return fmt
 
 
+def get_largest_jpeg_format(props, match_ar=None):
+    """Return a capture request and format spec for the largest jpeg size.
+
+    Args:
+        props:    the object returned from its.device.get_camera_properties().
+        match_ar: aspect ratio to match
+
+    Returns:
+        fmt:      an output format specification, for the largest possible jpeg
+        format for this device.
+    """
+    size = get_available_output_sizes("jpeg", props, match_ar_size=match_ar)[0]
+    fmt = {"format": "jpeg", "width": size[0], "height": size[1]}
+
+    return fmt
+
+
 def get_max_digital_zoom(props):
     """Returns the maximum amount of zooming possible by the camera device.
 
diff --git a/apps/CameraITS/pymodules/its/target.py b/apps/CameraITS/pymodules/its/target.py
index 3715f34..01d3c5f 100644
--- a/apps/CameraITS/pymodules/its/target.py
+++ b/apps/CameraITS/pymodules/its/target.py
@@ -69,9 +69,8 @@
     # Use the gains+transform returned by the AWB pass.
     req = its.objects.manual_capture_request(sens, exp_time)
     req["android.tonemap.mode"] = 0
-    req["android.tonemap.curveRed"] = tmap
-    req["android.tonemap.curveGreen"] = tmap
-    req["android.tonemap.curveBlue"] = tmap
+    req["android.tonemap.curve"] = {
+        "red": tmap, "green": tmap, "blue": tmap}
     req["android.colorCorrection.transform"] = xform_rat
     req["android.colorCorrection.gains"] = gains
     cap = its_session.do_capture(req)
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal.jpg
new file mode 100644
index 0000000..e6418be
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_15_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_15_ccw.jpg
new file mode 100644
index 0000000..fa921c2
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_15_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_30_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_30_ccw.jpg
new file mode 100644
index 0000000..907f0d2
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_30_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_45_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_45_ccw.jpg
new file mode 100644
index 0000000..59dc939
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_45_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_60_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_60_ccw.jpg
new file mode 100644
index 0000000..7d11c40
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_60_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_75_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_75_ccw.jpg
new file mode 100644
index 0000000..1193bb1
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_75_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_90_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_90_ccw.jpg
new file mode 100644
index 0000000..ea233ae
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/normal_90_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide.jpg
new file mode 100644
index 0000000..a790506
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_15_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_15_ccw.jpg
new file mode 100644
index 0000000..1871bab
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_15_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_30_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_30_ccw.jpg
new file mode 100644
index 0000000..d3bff2a
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_30_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_45_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_45_ccw.jpg
new file mode 100644
index 0000000..1298752
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_45_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_60_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_60_ccw.jpg
new file mode 100644
index 0000000..642aeea
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_60_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_75_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_75_ccw.jpg
new file mode 100644
index 0000000..b224ae4
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_75_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_90_ccw.jpg b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_90_ccw.jpg
new file mode 100644
index 0000000..c2ad401
--- /dev/null
+++ b/apps/CameraITS/pymodules/its/test_images/rotated_chessboards/wide_90_ccw.jpg
Binary files differ
diff --git a/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py b/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
index bea50af..e8c6d19 100644
--- a/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
+++ b/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
@@ -41,6 +41,7 @@
     """Capture a set of raw images with increasing gains and measure the noise.
     """
     NAME = os.path.basename(__file__).split(".")[0]
+    BAYER_LIST = ['R', 'GR', 'GB', 'B']
 
     # How many sensitivities per stop to sample.
     steps_per_stop = 2
@@ -97,6 +98,8 @@
             cam.do_3a(get_results=True, do_awb=False, do_af=False)
         # Underexpose to get more data for low signal levels.
         auto_e = s_ae*e_ae/bracket_factor
+        # Focus at zero to intentionally blur the scene as much as possible.
+        f_dist = 0.0
 
         # If the auto-exposure result is too bright for the highest
         # sensitivity or too dark for the lowest sensitivity, report
@@ -113,9 +116,9 @@
         # Start the sensitivities at the minimum.
         s = sens_min
 
-        samples = []
+        samples = [[], [], [], []]
         plots = []
-        measured_models = []
+        measured_models = [[], [], [], []]
         while s <= sens_max + 1:
             print "ISO %d" % round(s)
             fig = plt.figure()
@@ -124,15 +127,14 @@
             plt_s.set_xlabel("Mean signal level")
             plt_s.set_ylabel("Variance")
 
-            samples_s = []
+            samples_s = [[], [], [], []]
             for b in range(0, bracket_stops + 1):
                 # Get the exposure for this sensitivity and exposure time.
                 e = int(math.pow(2, b)*auto_e/float(s))
-                req = its.objects.manual_capture_request(round(s), e)
+                req = its.objects.manual_capture_request(round(s), e, f_dist)
                 cap = cam.do_capture(req, cam.CAP_RAW)
                 planes = its.image.convert_capture_to_planes(cap, props)
 
-                samples_e = []
                 for (pidx, p) in enumerate(planes):
                     p = p.squeeze()
 
@@ -156,138 +158,170 @@
                     vars_tiled = \
                         np.var(tile(hp, tile_size), axis=(0, 1)).flatten()
 
+                    samples_e = []
                     for (mean, var) in zip(means_tiled, vars_tiled):
                         # Don't include the tile if it has samples that might
                         # be clipped.
                         if mean + 2*math.sqrt(var) < max_signal_level:
                             samples_e.append([mean, var])
 
-                    means_e, vars_e = zip(*samples_e)
-                    plt_s.plot(means_e, vars_e, colors[b%len(colors)] + ',')
+                    if len(samples_e) > 0:
+                        means_e, vars_e = zip(*samples_e)
+                        plt_s.plot(means_e, vars_e, colors[b%len(colors)] + ',')
 
-                    samples_s.extend(samples_e)
+                        samples_s[pidx].extend(samples_e)
 
-            [S, O, R, p, stderr] = scipy.stats.linregress(samples_s)
-            measured_models.append([round(s), S, O])
-            print "Sensitivity %d: %e*y + %e (R=%f)" % (round(s), S, O, R)
+            for (pidx, p) in enumerate(samples_s):
+                [S, O, R, p, stderr] = scipy.stats.linregress(samples_s[pidx])
+                measured_models[pidx].append([round(s), S, O])
+                print "Sensitivity %d: %e*y + %e (R=%f)" % (round(s), S, O, R)
 
-            # Add the samples for this sensitivity to the global samples list.
-            samples.extend([(round(s), mean, var) for (mean, var) in samples_s])
+                # Add the samples for this sensitivity to the global samples list.
+                samples[pidx].extend([(round(s), mean, var) for (mean, var) in samples_s[pidx]])
 
-            # Add the linear fit to the plot for this sensitivity.
-            plt_s.plot([0, max_signal_level], [O, O + S*max_signal_level], 'r-',
-                       label="Linear fit")
-            xmax = max([x for (x, _) in samples_s])*1.25
+                # Add the linear fit to the plot for this sensitivity.
+                plt_s.plot([0, max_signal_level], [O, O + S*max_signal_level], 'rgkb'[pidx]+'--',
+                           label="Linear fit")
+
+            xmax = max([max([x for (x, _) in p]) for p in samples_s])*1.25
+            ymax = max([max([y for (_, y) in p]) for p in samples_s])*1.25
             plt_s.set_xlim(xmin=0, xmax=xmax)
-            plt_s.set_ylim(ymin=0, ymax=(O + S*xmax)*1.25)
+            plt_s.set_ylim(ymin=0, ymax=ymax)
+
             fig.savefig("%s_samples_iso%04d.png" % (NAME, round(s)))
             plots.append([round(s), fig])
 
             # Move to the next sensitivity.
             s *= math.pow(2, 1.0/steps_per_stop)
 
-        # Grab the sensitivities and line parameters from each sensitivity.
-        S_measured = [e[1] for e in measured_models]
-        O_measured = [e[2] for e in measured_models]
-        sens = np.asarray([e[0] for e in measured_models])
-        sens_sq = np.square(sens)
-
-        # Use a global linear optimization to fit the noise model.
-        gains = np.asarray([s[0] for s in samples])
-        means = np.asarray([s[1] for s in samples])
-        vars_ = np.asarray([s[2] for s in samples])
-
-        # Define digital gain as the gain above the max analog gain
-        # per the Camera2 spec. Also, define a corresponding C
-        # expression snippet to use in the generated model code.
-        digital_gains = np.maximum(gains/sens_max_analog, 1)
-        digital_gain_cdef = "(sens / %d.0) < 1.0 ? 1.0 : (sens / %d.0)" % \
-            (sens_max_analog, sens_max_analog)
-
-        # Find the noise model parameters via least squares fit.
-        ad = gains*means
-        bd = means
-        cd = gains*gains
-        dd = digital_gains*digital_gains
-        a = np.asarray([ad, bd, cd, dd]).T
-        b = vars_
-
-        # To avoid overfitting to high ISOs (high variances), divide the system
-        # by the gains.
-        a /= (np.tile(gains, (a.shape[1], 1)).T)
-        b /= gains
-
-        [A, B, C, D], _, _, _ = np.linalg.lstsq(a, b)
-
-        # Plot the noise model components with the values predicted by the
-        # noise model.
-        S_model = A*sens + B
-        O_model = \
-            C*sens_sq + D*np.square(np.maximum(sens/sens_max_analog, 1))
-
         (fig, (plt_S, plt_O)) = plt.subplots(2, 1)
         plt_S.set_title("Noise model")
         plt_S.set_ylabel("S")
-        plt_S.loglog(sens, S_measured, 'r+', basex=10, basey=10,
-                     label="Measured")
-        plt_S.loglog(sens, S_model, 'bx', basex=10, basey=10, label="Model")
         plt_S.legend(loc=2)
-
         plt_O.set_xlabel("ISO")
         plt_O.set_ylabel("O")
-        plt_O.loglog(sens, O_measured, 'r+', basex=10, basey=10,
-                     label="Measured")
-        plt_O.loglog(sens, O_model, 'bx', basex=10, basey=10, label="Model")
+
+        A = []
+        B = []
+        C = []
+        D = []
+        for (pidx, p) in enumerate(measured_models):
+            # Grab the sensitivities and line parameters from each sensitivity.
+            S_measured = [e[1] for e in measured_models[pidx]]
+            O_measured = [e[2] for e in measured_models[pidx]]
+            sens = np.asarray([e[0] for e in measured_models[pidx]])
+            sens_sq = np.square(sens)
+
+            # Use a global linear optimization to fit the noise model.
+            gains = np.asarray([s[0] for s in samples[pidx]])
+            means = np.asarray([s[1] for s in samples[pidx]])
+            vars_ = np.asarray([s[2] for s in samples[pidx]])
+
+            # Define digital gain as the gain above the max analog gain
+            # per the Camera2 spec. Also, define a corresponding C
+            # expression snippet to use in the generated model code.
+            digital_gains = np.maximum(gains/sens_max_analog, 1)
+            digital_gain_cdef = "(sens / %d.0) < 1.0 ? 1.0 : (sens / %d.0)" % \
+                (sens_max_analog, sens_max_analog)
+
+            # Find the noise model parameters via least squares fit.
+            ad = gains*means
+            bd = means
+            cd = gains*gains
+            dd = digital_gains*digital_gains
+            a = np.asarray([ad, bd, cd, dd]).T
+            b = vars_
+
+            # To avoid overfitting to high ISOs (high variances), divide the system
+            # by the gains.
+            a /= (np.tile(gains, (a.shape[1], 1)).T)
+            b /= gains
+
+            [A_p, B_p, C_p, D_p], _, _, _ = np.linalg.lstsq(a, b)
+            A.append(A_p)
+            B.append(B_p)
+            C.append(C_p)
+            D.append(D_p)
+
+            # Plot the noise model components with the values predicted by the
+            # noise model.
+            S_model = A_p*sens + B_p
+            O_model = \
+                C_p*sens_sq + D_p*np.square(np.maximum(sens/sens_max_analog, 1))
+
+            plt_S.loglog(sens, S_measured, 'rgkb'[pidx]+'+', basex=10, basey=10,
+                         label="Measured")
+            plt_S.loglog(sens, S_model, 'rgkb'[pidx]+'x', basex=10, basey=10, label="Model")
+
+            plt_O.loglog(sens, O_measured, 'rgkb'[pidx]+'+', basex=10, basey=10,
+                         label="Measured")
+            plt_O.loglog(sens, O_model, 'rgkb'[pidx]+'x', basex=10, basey=10, label="Model")
+
         fig.savefig("%s.png" % (NAME))
 
         for [s, fig] in plots:
             plt_s = fig.gca()
 
             dg = max(s/sens_max_analog, 1)
-            S = A*s + B
-            O = C*s*s + D*dg*dg
-            plt_s.plot([0, max_signal_level], [O, O + S*max_signal_level], 'b-',
-                       label="Model")
-            plt_s.legend(loc=2)
+            for (pidx, p) in enumerate(measured_models):
+                S = A[pidx]*s + B[pidx]
+                O = C[pidx]*s*s + D[pidx]*dg*dg
+                plt_s.plot([0, max_signal_level], [O, O + S*max_signal_level], 'rgkb'[pidx]+'-',
+                           label="Model")
 
+            plt_s.legend(loc=2)
             plt.figure(fig.number)
 
             # Re-save the plot with the global model.
             fig.savefig("%s_samples_iso%04d.png" % (NAME, round(s)))
 
-        # Generate the noise model implementation.
+        # Generate the noise model implementation.
+        A_array = ",".join([str(i) for i in A])
+        B_array = ",".join([str(i) for i in B])
+        C_array = ",".join([str(i) for i in C])
+        D_array = ",".join([str(i) for i in D])
         noise_model_code = textwrap.dedent("""\
             /* Generated test code to dump a table of data for external validation
              * of the noise model parameters.
              */
             #include <stdio.h>
             #include <assert.h>
-            double compute_noise_model_entry_S(int sens);
-            double compute_noise_model_entry_O(int sens);
+            double compute_noise_model_entry_S(int plane, int sens);
+            double compute_noise_model_entry_O(int plane, int sens);
             int main(void) {
-                int sens;
-                for (sens = %d; sens <= %d; sens += 100) {
-                    double o = compute_noise_model_entry_O(sens);
-                    double s = compute_noise_model_entry_S(sens);
-                    printf("%%d,%%lf,%%lf\\n", sens, o, s);
+                for (int plane = 0; plane < %d; plane++) {
+                    for (int sens = %d; sens <= %d; sens += 100) {
+                        double o = compute_noise_model_entry_O(plane, sens);
+                        double s = compute_noise_model_entry_S(plane, sens);
+                        printf("%%d,%%d,%%lf,%%lf\\n", plane, sens, o, s);
+                    }
                 }
                 return 0;
             }
 
             /* Generated functions to map a given sensitivity to the O and S noise
-             * model parameters in the DNG noise model.
+             * model parameters in the DNG noise model. The planes are in
+             * R, Gr, Gb, B order.
              */
-            double compute_noise_model_entry_S(int sens) {
-                double s = %e * sens + %e;
+            double compute_noise_model_entry_S(int plane, int sens) {
+                static double noise_model_A[] = { %s };
+                static double noise_model_B[] = { %s };
+                double A = noise_model_A[plane];
+                double B = noise_model_B[plane];
+                double s = A * sens + B;
                 return s < 0.0 ? 0.0 : s;
             }
 
-            double compute_noise_model_entry_O(int sens) {
+            double compute_noise_model_entry_O(int plane, int sens) {
+                static double noise_model_C[] = { %s };
+                static double noise_model_D[] = { %s };
                 double digital_gain = %s;
-                double o = %e * sens * sens + %e * digital_gain * digital_gain;
+                double C = noise_model_C[plane];
+                double D = noise_model_D[plane];
+                double o = C * sens * sens + D * digital_gain * digital_gain;
                 return o < 0.0 ? 0.0 : o;
             }
-            """ % (sens_min, sens_max, A, B, digital_gain_cdef, C, D))
+            """ % (len(A), sens_min, sens_max, A_array, B_array, C_array, D_array, digital_gain_cdef))
         print noise_model_code
         text_file = open("noise_model.c", "w")
         text_file.write("%s" % noise_model_code)
@@ -295,4 +329,3 @@
 
 if __name__ == '__main__':
     main()
-
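For reference, the refactor above fits one noise model per Bayer plane: tile variance is modeled as S*mean + O, with S = A*sens + B and O = C*sens^2 + D*dg^2, solved by the same gain-normalized least squares as before. Below is a minimal numpy-only sketch of that fitting step on synthetic samples for a single plane; the data and variable names are illustrative, not the ITS API.

    import numpy as np

    # Synthetic (gain, mean, var) samples standing in for one Bayer plane.
    rng = np.random.RandomState(0)
    sens_max_analog = 800.0
    gains = rng.uniform(100, 3200, 500)              # ISO per tile sample
    means = rng.uniform(0.0, 0.8, 500)               # normalized signal level
    dgs = np.maximum(gains / sens_max_analog, 1.0)   # digital gain above max analog
    A0, B0, C0, D0 = 1e-6, 1e-4, 1e-9, 1e-7          # "true" model parameters
    vars_ = (A0 * gains + B0) * means + C0 * gains ** 2 + D0 * dgs ** 2

    # var = A*sens*mean + B*mean + C*sens^2 + D*dg^2, solved in least squares;
    # dividing both sides by the gain de-weights the high-ISO (high-variance) rows.
    a = np.asarray([gains * means, means, gains * gains, dgs * dgs]).T
    b = vars_.copy()
    a /= np.tile(gains, (a.shape[1], 1)).T
    b /= gains
    (A, B, C, D), _, _, _ = np.linalg.lstsq(a, b)
    print('fitted A=%e B=%e C=%e D=%e' % (A, B, C, D))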
diff --git a/apps/CameraITS/tests/inprog/test_3a_remote.py b/apps/CameraITS/tests/inprog/test_3a_remote.py
index c76ff6d..1efc1aa 100644
--- a/apps/CameraITS/tests/inprog/test_3a_remote.py
+++ b/apps/CameraITS/tests/inprog/test_3a_remote.py
@@ -29,8 +29,6 @@
 
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
-        w_map = props["android.lens.info.shadingMapSize"]["width"]
-        h_map = props["android.lens.info.shadingMapSize"]["height"]
 
         # TODO: Test for 3A convergence, and exit this test once converged.
 
@@ -50,7 +48,10 @@
             gains = cap["metadata"]["android.colorCorrection.gains"]
             transform = cap["metadata"]["android.colorCorrection.transform"]
             exp_time = cap["metadata"]['android.sensor.exposureTime']
-            lsc_map = cap["metadata"]["android.statistics.lensShadingMap"]
+            lsc_obj = cap_res["android.statistics.lensShadingCorrectionMap"]
+            lsc_map = lsc_obj["map"]
+            w_map = lsc_obj["width"]
+            h_map = lsc_obj["height"]
             foc_dist = cap["metadata"]['android.lens.focusDistance']
             foc_range = cap["metadata"]['android.lens.focusRange']
 
diff --git a/apps/CameraITS/tests/inprog/test_blc_lsc.py b/apps/CameraITS/tests/inprog/test_blc_lsc.py
index 32c0c49..1e417c1 100644
--- a/apps/CameraITS/tests/inprog/test_blc_lsc.py
+++ b/apps/CameraITS/tests/inprog/test_blc_lsc.py
@@ -63,9 +63,8 @@
         for e in exposures:
             req = its.objects.manual_capture_request(ae_sen,e)
             req["android.tonemap.mode"] = 0
-            req["android.tonemap.curveRed"] = tmap
-            req["android.tonemap.curveGreen"] = tmap
-            req["android.tonemap.curveBlue"] = tmap
+            req["android.tonemap.curve"] = {
+                "red": tmap, "green": tmap, "blue": tmap}
             req["android.colorCorrection.transform"] = awb_transform_rat
             req["android.colorCorrection.gains"] = awb_gains
             reqs.append(req)
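For reference, this hunk (and the matching ones in test_auto_vs_manual.py, test_capture_result.py, test_ev_compensation_advanced.py and test_linearity.py further down) replaces the three android.tonemap.curveRed/Green/Blue request keys with a single android.tonemap.curve dict keyed by channel. A small sketch of the new shape, using a plain dict in place of a real ITS capture request and the same gamma construction as test_auto_vs_manual.py:

    # Sketch only: flattened (in, out) pairs for a gamma-2.2 curve, attached
    # through the combined tonemap key introduced by this change.
    gamma = sum([[i / 63.0, (i / 63.0) ** (1 / 2.2)] for i in range(64)], [])
    req = {}  # stands in for its.objects.manual_capture_request(...)
    req['android.tonemap.mode'] = 0  # CONTRAST_CURVE
    req['android.tonemap.curve'] = {'red': gamma, 'green': gamma, 'blue': gamma}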
diff --git a/apps/CameraITS/tests/inprog/test_test_patterns.py b/apps/CameraITS/tests/inprog/test_test_patterns.py
deleted file mode 100644
index f75b141..0000000
--- a/apps/CameraITS/tests/inprog/test_test_patterns.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2014 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import its.image
-import its.device
-import its.objects
-import os.path
-
-def main():
-    """Test sensor test patterns.
-    """
-    NAME = os.path.basename(__file__).split(".")[0]
-
-    with its.device.ItsSession() as cam:
-        caps = []
-        for i in range(1,6):
-            req = its.objects.manual_capture_request(100, 10*1000*1000)
-            req['android.sensor.testPatternData'] = [40, 100, 160, 220]
-            req['android.sensor.testPatternMode'] = i
-
-            # Capture the shot twice, and use the second one, so the pattern
-            # will have stabilized.
-            caps = cam.do_capture([req]*2)
-
-            img = its.image.convert_capture_to_rgb_image(caps[1])
-            its.image.write_image(img, "%s_pattern=%d.jpg" % (NAME, i))
-
-if __name__ == '__main__':
-    main()
-
diff --git a/apps/CameraITS/tests/scene0/test_burst_capture.py b/apps/CameraITS/tests/scene0/test_burst_capture.py
index e6ee100..c573584 100644
--- a/apps/CameraITS/tests/scene0/test_burst_capture.py
+++ b/apps/CameraITS/tests/scene0/test_burst_capture.py
@@ -12,21 +12,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import its.image
-import its.device
-import its.objects
 import os.path
 
+import its.caps
+import its.device
+import its.image
+import its.objects
+
+
 def main():
     """Test capture a burst of full size images is fast enough to not timeout.
+
        This test verify that entire capture pipeline can keep up the speed
        of fullsize capture + CPU read for at least some time.
     """
     NAME = os.path.basename(__file__).split(".")[0]
-    NUM_TEST_FRAMES = 20
+    NUM_TEST_FRAMES = 15
 
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.backward_compatible(props))
         req = its.objects.auto_capture_request()
         caps = cam.do_capture([req]*NUM_TEST_FRAMES)
 
diff --git a/apps/CameraITS/tests/scene0/test_camera_properties.py b/apps/CameraITS/tests/scene0/test_camera_properties.py
index eb638f0..dbd528d 100644
--- a/apps/CameraITS/tests/scene0/test_camera_properties.py
+++ b/apps/CameraITS/tests/scene0/test_camera_properties.py
@@ -26,8 +26,6 @@
 
         pprint.pprint(props)
 
-        its.caps.skip_unless(its.caps.manual_sensor(props))
-
         # Test that a handful of required keys are present.
         assert(props.has_key('android.sensor.info.sensitivityRange'))
         assert(props.has_key('android.sensor.orientation'))
diff --git a/apps/CameraITS/tests/scene0/test_metadata.py b/apps/CameraITS/tests/scene0/test_metadata.py
index 752e02b..b8949b1 100644
--- a/apps/CameraITS/tests/scene0/test_metadata.py
+++ b/apps/CameraITS/tests/scene0/test_metadata.py
@@ -31,6 +31,7 @@
         # Arbitrary capture request exposure values; image content is not
         # important for this test, only the metadata.
         props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.backward_compatible(props))
         auto_req = its.objects.auto_capture_request()
         cap = cam.do_capture(auto_req)
         md = cap["metadata"]
@@ -90,8 +91,8 @@
         pixel_pitch_w = (sensor_size["width"] / fmts[0]["width"] * 1E3)
         print "Assert pixel_pitch WxH: %.2f um, %.2f um" % (pixel_pitch_w,
                                                             pixel_pitch_h)
-        assert 1.0 <= pixel_pitch_w <= 10
-        assert 1.0 <= pixel_pitch_h <= 10
+        assert 0.7 <= pixel_pitch_w <= 10
+        assert 0.7 <= pixel_pitch_h <= 10
         assert 0.333 <= pixel_pitch_w/pixel_pitch_h <= 3.0
 
         diag = math.sqrt(sensor_size["height"] ** 2 +
diff --git a/apps/CameraITS/tests/scene0/test_param_sensitivity_burst.py b/apps/CameraITS/tests/scene0/test_param_sensitivity_burst.py
index c3c2147..b716141 100644
--- a/apps/CameraITS/tests/scene0/test_param_sensitivity_burst.py
+++ b/apps/CameraITS/tests/scene0/test_param_sensitivity_burst.py
@@ -12,19 +12,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import its.image
 import its.caps
 import its.device
+import its.image
 import its.objects
 import its.target
 
-def main():
-    """Test that the android.sensor.sensitivity parameter is applied properly
-    within a burst. Inspects the output metadata only (not the image data).
-    """
+NUM_STEPS = 3
+ERROR_TOLERANCE = 0.96  # Allow ISO to be rounded down by 4%
 
-    NUM_STEPS = 3
-    ERROR_TOLERANCE = 0.97 # Allow ISO to be rounded down by 3%
+
+def main():
+    """Test android.sensor.sensitivity parameter applied properly in burst.
+
+    Inspects the output metadata only (not the image data).
+    """
 
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
@@ -35,15 +37,17 @@
         sens_step = (sens_range[1] - sens_range[0]) / NUM_STEPS
         sens_list = range(sens_range[0], sens_range[1], sens_step)
         e = min(props['android.sensor.info.exposureTimeRange'])
-        reqs = [its.objects.manual_capture_request(s,e) for s in sens_list]
-        _,fmt = its.objects.get_fastest_manual_capture_settings(props)
+        reqs = [its.objects.manual_capture_request(s, e) for s in sens_list]
+        _, fmt = its.objects.get_fastest_manual_capture_settings(props)
 
         caps = cam.do_capture(reqs, fmt)
-        for i,cap in enumerate(caps):
+        for i, cap in enumerate(caps):
             s_req = sens_list[i]
-            s_res = cap["metadata"]["android.sensor.sensitivity"]
-            assert(s_req >= s_res)
-            assert(s_res/float(s_req) > ERROR_TOLERANCE)
+            s_res = cap['metadata']['android.sensor.sensitivity']
+            msg = 's_write: %d, s_read: %d, TOL: %.2f' % (s_req, s_res,
+                                                          ERROR_TOLERANCE)
+            assert s_req >= s_res, msg
+            assert s_res/float(s_req) > ERROR_TOLERANCE, msg
 
 if __name__ == '__main__':
     main()
diff --git a/apps/CameraITS/tests/scene0/test_read_write.py b/apps/CameraITS/tests/scene0/test_read_write.py
new file mode 100644
index 0000000..1b76806
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_read_write.py
@@ -0,0 +1,117 @@
+# Copyright 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+
+import its.caps
+import its.device
+import its.image
+import its.objects
+
+NAME = os.path.basename(__file__).split('.')[0]
+RTOL_EXP_GAIN = 0.97
+TEST_EXP_RANGE = [6E6, 1E9]  # ns [6ms, 1s]
+
+
+def main():
+    """Test that the device will write/read correct exp/gain values."""
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.manual_sensor(props) and
+                             its.caps.per_frame_control(props))
+
+        # determine capture format
+        debug = its.caps.debug_mode()
+        largest_yuv = its.objects.get_largest_yuv_format(props)
+        if debug:
+            fmt = largest_yuv
+        else:
+            match_ar = (largest_yuv['width'], largest_yuv['height'])
+            fmt = its.objects.get_smallest_yuv_format(props, match_ar=match_ar)
+
+        # grab exp/gain ranges from camera
+        sensor_exp_range = props['android.sensor.info.exposureTimeRange']
+        sens_range = props['android.sensor.info.sensitivityRange']
+        print 'sensor e range:', sensor_exp_range
+        print 'sensor s range:', sens_range
+
+        # determine if exposure test range is within sensor reported range
+        exp_range = []
+        if sensor_exp_range[0] < TEST_EXP_RANGE[0]:
+            exp_range.append(TEST_EXP_RANGE[0])
+        else:
+            exp_range.append(sensor_exp_range[0])
+        if sensor_exp_range[1] > TEST_EXP_RANGE[1]:
+            exp_range.append(TEST_EXP_RANGE[1])
+        else:
+            exp_range.append(sensor_exp_range[1])
+
+        # build requests
+        reqs = []
+        index_list = []
+        for exp in exp_range:
+            for sens in sens_range:
+                reqs.append(its.objects.manual_capture_request(sens, exp))
+                index_list.append((exp, sens))
+
+        # take shots
+        caps = cam.do_capture(reqs, fmt)
+
+        # extract exp/sensitivity data
+        data = {}
+        for i, cap in enumerate(caps):
+            e_read = cap['metadata']['android.sensor.exposureTime']
+            s_read = cap['metadata']['android.sensor.sensitivity']
+            data[index_list[i]] = (e_read, s_read)
+
+        # check read/write match across all shots
+        e_failed = []
+        s_failed = []
+        for e_write in exp_range:
+            for s_write in sens_range:
+                (e_read, s_read) = data[(e_write, s_write)]
+                if e_write < e_read or e_read/float(e_write) <= RTOL_EXP_GAIN:
+                    e_failed.append({'e_write': e_write,
+                                     'e_read': e_read,
+                                     's_write': s_write,
+                                     's_read': s_read})
+                if s_write < s_read or s_read/float(s_write) <= RTOL_EXP_GAIN:
+                    s_failed.append({'e_write': e_write,
+                                     'e_read': e_read,
+                                     's_write': s_write,
+                                     's_read': s_read})
+
+        # print results
+        if e_failed:
+            print '\nFAILs for exposure time'
+            for fail in e_failed:
+                print ' e_write: %d, e_read: %d, RTOL: %.2f, ' % (
+                        fail['e_write'], fail['e_read'], RTOL_EXP_GAIN),
+                print 's_write: %d, s_read: %d, RTOL: %.2f' % (
+                        fail['s_write'], fail['s_read'], RTOL_EXP_GAIN)
+        if s_failed:
+            print 'FAILs for sensitivity(ISO)'
+            for fail in s_failed:
+                print 's_write: %d, s_read: %d, RTOL: %.2f, ' % (
+                        fail['s_write'], fail['s_read'], RTOL_EXP_GAIN),
+                print ' e_write: %d, e_read: %d, RTOL: %.2f' % (
+                        fail['e_write'], fail['e_read'], RTOL_EXP_GAIN)
+
+        # assert PASS/FAIL
+        assert not e_failed+s_failed
+
+
+if __name__ == '__main__':
+    main()
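For reference, the pass/fail rule in the new test above is: the read-back exposure or sensitivity may be rounded down relative to the request, but never above it and never by more than 1 - RTOL_EXP_GAIN. A compact sketch of that rule on made-up values (not device data):

    RTOL_EXP_GAIN = 0.97

    def write_read_ok(write, read):
        # Read-back may be rounded down, but not above the request and
        # not by more than (1 - RTOL_EXP_GAIN).
        return read <= write and read / float(write) > RTOL_EXP_GAIN

    assert write_read_ok(100, 98)       # 2% round-down: within tolerance
    assert not write_read_ok(100, 96)   # 4% round-down: flagged
    assert not write_read_ok(100, 101)  # read above write: flagged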
diff --git a/apps/CameraITS/tests/scene0/test_test_patterns.py b/apps/CameraITS/tests/scene0/test_test_patterns.py
new file mode 100644
index 0000000..a1d9cb8
--- /dev/null
+++ b/apps/CameraITS/tests/scene0/test_test_patterns.py
@@ -0,0 +1,174 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import its.caps
+import its.device
+import its.image
+import its.objects
+import numpy as np
+
+NAME = os.path.basename(__file__).split('.')[0]
+PATTERNS = [1, 2]
+COLOR_BAR_ORDER = ['WHITE', 'YELLOW', 'CYAN', 'GREEN', 'MAGENTA', 'RED',
+                   'BLUE', 'BLACK']
+COLOR_CHECKER = {'BLACK': [0, 0, 0], 'RED': [1, 0, 0], 'GREEN': [0, 1, 0],
+                 'BLUE': [0, 0, 1], 'MAGENTA': [1, 0, 1], 'CYAN': [0, 1, 1],
+                 'YELLOW': [1, 1, 0], 'WHITE': [1, 1, 1]}
+CH_TOL = 2E-3  # 1/2 DN in [0:1]
+LSFR_COEFFS = 0b100010000  # PN9
+
+
+def check_solid_color(cap, props):
+    """Simple test for solid color.
+
+    Args:
+        cap: capture element
+        props: capture properties
+    Returns:
+        True/False
+    """
+    print 'Checking solid TestPattern...'
+    r, gr, gb, b = its.image.convert_capture_to_planes(cap, props)
+    r_tile = its.image.get_image_patch(r, 0.0, 0.0, 1.0, 1.0)
+    gr_tile = its.image.get_image_patch(gr, 0.0, 0.0, 1.0, 1.0)
+    gb_tile = its.image.get_image_patch(gb, 0.0, 0.0, 1.0, 1.0)
+    b_tile = its.image.get_image_patch(b, 0.0, 0.0, 1.0, 1.0)
+    var_max = max(np.amax(r_tile), np.amax(gr_tile), np.amax(gb_tile),
+                  np.amax(b_tile))
+    var_min = min(np.amin(r_tile), np.amin(gr_tile), np.amin(gb_tile),
+                  np.amin(b_tile))
+    white_level = int(props['android.sensor.info.whiteLevel'])
+    print ' pixel min: %.f, pixel max: %.f' % (white_level*var_min,
+                                               white_level*var_max)
+    return np.isclose(var_max, var_min, atol=CH_TOL)
+
+
+def check_color_bars(cap, props, mirror=False):
+    """Test image for color bars.
+
+    Compute avg of bars and compare to ideal
+
+    Args:
+        cap:            capture element
+        props:          capture properties
+        mirror (bool):  whether to mirror image or not
+    Returns:
+        True/False
+    """
+    print 'Checking color bar TestPattern...'
+    delta = 0.0005
+    num_bars = len(COLOR_BAR_ORDER)
+    color_match = []
+    img = its.image.convert_capture_to_rgb_image(cap, props=props)
+    if mirror:
+        print ' Image mirrored'
+        img = np.fliplr(img)
+    for i, color in enumerate(COLOR_BAR_ORDER):
+        tile = its.image.get_image_patch(img, float(i)/num_bars+delta,
+                                         0.0, 1.0/num_bars-2*delta, 1.0)
+        color_match.append(np.allclose(its.image.compute_image_means(tile),
+                                       COLOR_CHECKER[color], atol=CH_TOL))
+    print COLOR_BAR_ORDER
+    print color_match
+    return all(color_match)
+
+
+def check_pattern(cap, props, pattern):
+    """Simple tests for pattern correctness.
+
+    Args:
+        cap: capture element
+        props: capture properties
+        pattern (int): valid number for pattern
+    Returns:
+        boolean
+    """
+
+    # white_level = int(props['android.sensor.info.whiteLevel'])
+    if pattern == 1:  # solid color
+        return check_solid_color(cap, props)
+
+    elif pattern == 2:  # color bars
+        striped = check_color_bars(cap, props, mirror=False)
+        # check mirrored version in case image rotated from sensor orientation
+        if not striped:
+            striped = check_color_bars(cap, props, mirror=True)
+        return striped
+
+    else:
+        print 'No specific test for TestPattern %d' % pattern
+        return True
+
+
+def test_test_patterns(cam, props, af_fd):
+    """test image sensor test patterns.
+
+    Args:
+        cam: An open device session.
+        props: Properties of cam
+        af_fd: Focus distance
+    """
+
+    avail_patterns = props['android.sensor.availableTestPatternModes']
+    print 'avail_patterns: ', avail_patterns
+    sens_min, _ = props['android.sensor.info.sensitivityRange']
+    exposure = min(props['android.sensor.info.exposureTimeRange'])
+
+    for pattern in PATTERNS:
+        if pattern in avail_patterns:
+            req = its.objects.manual_capture_request(int(sens_min),
+                                                     exposure)
+            req['android.lens.focusDistance'] = af_fd
+            req['android.sensor.testPatternMode'] = pattern
+            fmt = {'format': 'raw'}
+            cap = cam.do_capture(req, fmt)
+            img = its.image.convert_capture_to_rgb_image(cap, props=props)
+
+            # Save pattern
+            its.image.write_image(img, '%s_%d.jpg' % (NAME, pattern), True)
+
+            # Check pattern for correctness
+            assert check_pattern(cap, props, pattern)
+        else:
+            print 'Pattern not in android.sensor.availableTestPatternModes.'
+
+
+def main():
+    """Test pattern generation test.
+
+    Test: capture frames for each valid test pattern and check if
+    generated correctly.
+    android.sensor.testPatternMode
+    0: OFF
+    1: SOLID_COLOR
+    2: COLOR_BARS
+    3: COLOR_BARS_FADE_TO_GREY
+    4: PN9
+    """
+
+    print '\nStarting %s' % NAME
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.raw16(props) and
+                             its.caps.manual_sensor(props) and
+                             its.caps.per_frame_control(props))
+
+        # For test pattern, use min_fd
+        fd = props['android.lens.info.minimumFocusDistance']
+        test_test_patterns(cam, props, fd)
+
+if __name__ == '__main__':
+    main()
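For reference, check_color_bars() above slices the frame into equal vertical bars and compares each bar's mean RGB against the ideal color with np.allclose. A self-contained numpy sketch of that acceptance rule on a synthetic, ideal color-bar image (the image construction is illustrative, not the its.image API):

    import numpy as np

    COLOR_BAR_ORDER = ['WHITE', 'YELLOW', 'CYAN', 'GREEN', 'MAGENTA', 'RED',
                       'BLUE', 'BLACK']
    COLOR_CHECKER = {'BLACK': [0, 0, 0], 'RED': [1, 0, 0], 'GREEN': [0, 1, 0],
                     'BLUE': [0, 0, 1], 'MAGENTA': [1, 0, 1], 'CYAN': [0, 1, 1],
                     'YELLOW': [1, 1, 0], 'WHITE': [1, 1, 1]}
    CH_TOL = 2E-3

    # Synthetic frame: 8 ideal vertical bars, 64 px wide each.
    bar_w, h = 64, 48
    img = np.zeros((h, bar_w * len(COLOR_BAR_ORDER), 3))
    for i, color in enumerate(COLOR_BAR_ORDER):
        img[:, i * bar_w:(i + 1) * bar_w, :] = COLOR_CHECKER[color]

    # Same check as the test: per-bar mean RGB vs. the ideal color.
    matches = []
    for i, color in enumerate(COLOR_BAR_ORDER):
        bar = img[:, i * bar_w:(i + 1) * bar_w, :]
        mean_rgb = bar.reshape(-1, 3).mean(axis=0)
        matches.append(np.allclose(mean_rgb, COLOR_CHECKER[color], atol=CH_TOL))
    assert all(matches)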
diff --git a/apps/CameraITS/tests/scene0/test_unified_timestamps.py b/apps/CameraITS/tests/scene0/test_unified_timestamps.py
index a496fb3..5a9228e 100644
--- a/apps/CameraITS/tests/scene0/test_unified_timestamps.py
+++ b/apps/CameraITS/tests/scene0/test_unified_timestamps.py
@@ -25,10 +25,14 @@
         props = cam.get_camera_properties()
 
         # Only run test if the appropriate caps are claimed.
-        its.caps.skip_unless(its.caps.sensor_fusion(props))
+        its.caps.skip_unless(its.caps.sensor_fusion(props) and
+                             its.caps.backward_compatible(props))
 
         # Get the timestamp of a captured image.
-        req, fmt = its.objects.get_fastest_manual_capture_settings(props)
+        if its.caps.manual_sensor(props):
+            req, fmt = its.objects.get_fastest_manual_capture_settings(props)
+        else:
+            req, fmt = its.objects.get_fastest_auto_capture_settings(props)
         cap = cam.do_capture(req, fmt)
         ts_image0 = cap['metadata']['android.sensor.timestamp']
 
diff --git a/apps/CameraITS/tests/scene1/scene1_0.67_scaled.pdf b/apps/CameraITS/tests/scene1/scene1_0.67_scaled.pdf
new file mode 100644
index 0000000..3103cd8
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/scene1_0.67_scaled.pdf
Binary files differ
diff --git a/apps/CameraITS/tests/scene1/test_3a.py b/apps/CameraITS/tests/scene1/test_3a.py
index 08cd747..65cac71 100644
--- a/apps/CameraITS/tests/scene1/test_3a.py
+++ b/apps/CameraITS/tests/scene1/test_3a.py
@@ -12,8 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import its.device
 import its.caps
+import its.device
+
+import numpy as np
+
 
 def main():
     """Basic test for bring-up of 3A.
@@ -24,16 +27,22 @@
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.read_3a(props))
+        mono_camera = its.caps.mono_camera(props)
 
-        sens, exp, gains, xform, focus = cam.do_3a(get_results=True)
-        print "AE: sensitivity %d, exposure %dms" % (sens, exp/1000000)
-        print "AWB: gains", gains, "transform", xform
-        print "AF: distance", focus
-        assert(sens > 0)
-        assert(exp > 0)
-        assert(len(gains) == 4)
-        assert(len(xform) == 9)
-        assert(focus >= 0)
+        sens, exp, gains, xform, focus = cam.do_3a(get_results=True,
+                                                   mono_camera=mono_camera)
+        print 'AE: sensitivity %d, exposure %dms' % (sens, exp/1000000)
+        print 'AWB: gains', gains, 'transform', xform
+        print 'AF: distance', focus
+        assert sens > 0
+        assert exp > 0
+        assert len(gains) == 4
+        for g in gains:
+            assert not np.isnan(g)
+        assert len(xform) == 9
+        for x in xform:
+            assert not np.isnan(x)
+        assert focus >= 0
 
 if __name__ == '__main__':
     main()
diff --git a/apps/CameraITS/tests/scene1/test_ae_af.py b/apps/CameraITS/tests/scene1/test_ae_af.py
index 626a475..fabc494 100644
--- a/apps/CameraITS/tests/scene1/test_ae_af.py
+++ b/apps/CameraITS/tests/scene1/test_ae_af.py
@@ -16,7 +16,7 @@
 import its.device
 import its.target
 
-import numpy
+import numpy as np
 
 GAIN_LENGTH = 4
 TRANSFORM_LENGTH = 9
@@ -35,16 +35,18 @@
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.read_3a(props))
+        mono_camera = its.caps.mono_camera(props)
 
         for k, v in sorted(SINGLE_A.items()):
             print k
             try:
-                s, e, g, xform, fd = cam.do_3a(get_results=True,
-                                               do_ae=v[0],
-                                               do_af=v[1],
-                                               do_awb=v[2])
+                s, e, gains, xform, fd = cam.do_3a(get_results=True,
+                                                   do_ae=v[0],
+                                                   do_af=v[1],
+                                                   do_awb=v[2],
+                                                   mono_camera=mono_camera)
                 print ' sensitivity', s, 'exposure', e
-                print ' gains', g, 'transform', xform
+                print ' gains', gains, 'transform', xform
                 print ' fd', fd
                 print ''
             except its.error.Error:
@@ -52,10 +54,14 @@
             if k == 'full_3a':
                 assert s > 0
                 assert e > 0
-                assert len(g) == 4
+                assert len(gains) == 4
+                for g in gains:
+                    assert not np.isnan(g)
                 assert len(xform) == 9
+                for x in xform:
+                    assert not np.isnan(x)
                 assert fd >= 0
-                assert numpy.isclose(g[2], GREEN_GAIN, GREEN_GAIN_TOL)
+                assert np.isclose(gains[2], GREEN_GAIN, GREEN_GAIN_TOL)
 
 if __name__ == '__main__':
     main()
diff --git a/apps/CameraITS/tests/scene1/test_ae_precapture_trigger.py b/apps/CameraITS/tests/scene1/test_ae_precapture_trigger.py
index bb91c9a..a626ee4 100644
--- a/apps/CameraITS/tests/scene1/test_ae_precapture_trigger.py
+++ b/apps/CameraITS/tests/scene1/test_ae_precapture_trigger.py
@@ -18,7 +18,7 @@
 import its.target
 
 AE_FRAMES_PER_ITERATION = 8
-AE_CONVERGE_ITERATIONS = 3
+AE_CONVERGE_ITERATIONS = 5
 # AE must converge within this number of auto requests under scene1
 THRESH_AE_CONVERGE = AE_FRAMES_PER_ITERATION * AE_CONVERGE_ITERATIONS
 
diff --git a/apps/CameraITS/tests/scene1/test_auto_vs_manual.py b/apps/CameraITS/tests/scene1/test_auto_vs_manual.py
index 849c720..a7b5add 100644
--- a/apps/CameraITS/tests/scene1/test_auto_vs_manual.py
+++ b/apps/CameraITS/tests/scene1/test_auto_vs_manual.py
@@ -18,6 +18,7 @@
 import its.objects
 import os.path
 import math
+import numpy as np
 
 def main():
     """Capture auto and manual shots that should look the same.
@@ -35,6 +36,7 @@
         its.caps.skip_unless(its.caps.manual_sensor(props) and
                              its.caps.manual_post_proc(props) and
                              its.caps.per_frame_control(props))
+        mono_camera = its.caps.mono_camera(props)
 
         # Converge 3A and get the estimates.
         debug = its.caps.debug_mode()
@@ -44,7 +46,8 @@
         else:
             match_ar = (largest_yuv['width'], largest_yuv['height'])
             fmt = its.objects.get_smallest_yuv_format(props, match_ar=match_ar)
-        sens, exp, gains, xform, focus = cam.do_3a(get_results=True)
+        sens, exp, gains, xform, focus = cam.do_3a(get_results=True,
+                                                   mono_camera=mono_camera)
         xform_rat = its.objects.float_to_rational(xform)
         print "AE sensitivity %d, exposure %dms" % (sens, exp/1000000.0)
         print "AWB gains", gains
@@ -78,9 +81,8 @@
         # Manual capture 2: WB + tonemap
         gamma = sum([[i/63.0,math.pow(i/63.0,1/2.2)] for i in xrange(64)],[])
         req["android.tonemap.mode"] = 0
-        req["android.tonemap.curveRed"] = gamma
-        req["android.tonemap.curveGreen"] = gamma
-        req["android.tonemap.curveBlue"] = gamma
+        req["android.tonemap.curve"] = {
+            "red": gamma, "green": gamma, "blue": gamma}
         cap_man2 = cam.do_capture(req, fmt)
         img_man2 = its.image.convert_capture_to_rgb_image(cap_man2)
         its.image.write_image(img_man2, "%s_manual_wb_tm.jpg" % (NAME))
@@ -92,10 +94,14 @@
 
         # Check that the WB gains and transform reported in each capture
         # result match with the original AWB estimate from do_3a.
-        for g,x in [(gains_a,xform_a),(gains_m1,xform_m1),(gains_m2,xform_m2)]:
+        for g,x in [(gains_m1,xform_m1),(gains_m2,xform_m2)]:
             assert(all([abs(xform[i] - x[i]) < 0.05 for i in range(9)]))
             assert(all([abs(gains[i] - g[i]) < 0.05 for i in range(4)]))
 
+        # Check that auto AWB settings are close
+        assert(all([np.isclose(xform_a[i], xform[i], rtol=0.25, atol=0.1) for i in range(9)]))
+        assert(all([np.isclose(gains_a[i], gains[i], rtol=0.25, atol=0.1) for i in range(4)]))
+
 if __name__ == '__main__':
     main()
 
diff --git a/apps/CameraITS/tests/scene1/test_capture_result.py b/apps/CameraITS/tests/scene1/test_capture_result.py
index e797ec0..a3b81fa 100644
--- a/apps/CameraITS/tests/scene1/test_capture_result.py
+++ b/apps/CameraITS/tests/scene1/test_capture_result.py
@@ -63,24 +63,21 @@
             "android.colorCorrection.transform": manual_transform,
             "android.colorCorrection.gains": manual_gains,
             "android.tonemap.mode": 0,
-            "android.tonemap.curveRed": manual_tonemap,
-            "android.tonemap.curveGreen": manual_tonemap,
-            "android.tonemap.curveBlue": manual_tonemap,
+            "android.tonemap.curve": {"red": manual_tonemap,
+                                      "green": manual_tonemap,
+                                      "blue": manual_tonemap},
             "android.control.aeRegions": manual_region,
             "android.control.afRegions": manual_region,
             "android.control.awbRegions": manual_region,
             "android.statistics.lensShadingMapMode":1
             }
 
-        w_map = props["android.lens.info.shadingMapSize"]["width"]
-        h_map = props["android.lens.info.shadingMapSize"]["height"]
-
         print "Testing auto capture results"
-        lsc_map_auto = test_auto(cam, w_map, h_map, props)
+        lsc_map_auto = test_auto(cam, props)
         print "Testing manual capture results"
-        test_manual(cam, w_map, h_map, lsc_map_auto, props)
+        test_manual(cam, lsc_map_auto, props)
         print "Testing auto capture results again"
-        test_auto(cam, w_map, h_map, props)
+        test_auto(cam, props)
 
 # A very loose definition for two floats being close to each other;
 # there may be different interpolation and rounding used to get the
@@ -104,11 +101,12 @@
         ax.plot_wireframe(xs, ys, zs)
         matplotlib.pyplot.savefig("%s_plot_lsc_%s_ch%d.png"%(NAME,name,ch))
 
-def test_auto(cam, w_map, h_map, props):
+def test_auto(cam, props):
     # Get 3A lock first, so the auto values in the capture result are
     # populated properly.
     rect = [[0,0,1,1,1]]
-    cam.do_3a(rect, rect, rect, do_af=False)
+    mono_camera = its.caps.mono_camera(props)
+    cam.do_3a(rect, rect, rect, do_af=False, mono_camera=mono_camera)
 
     cap = cam.do_capture(auto_req)
     cap_res = cap["metadata"]
@@ -116,7 +114,10 @@
     gains = cap_res["android.colorCorrection.gains"]
     transform = cap_res["android.colorCorrection.transform"]
     exp_time = cap_res['android.sensor.exposureTime']
-    lsc_map = cap_res["android.statistics.lensShadingMap"]
+    lsc_obj = cap_res["android.statistics.lensShadingCorrectionMap"]
+    lsc_map = lsc_obj["map"]
+    w_map = lsc_obj["width"]
+    h_map = lsc_obj["height"]
     ctrl_mode = cap_res["android.control.mode"]
 
     print "Control mode:", ctrl_mode
@@ -156,17 +157,20 @@
 
     return lsc_map
 
-def test_manual(cam, w_map, h_map, lsc_map_auto, props):
+def test_manual(cam, lsc_map_auto, props):
     cap = cam.do_capture(manual_req)
     cap_res = cap["metadata"]
 
     gains = cap_res["android.colorCorrection.gains"]
     transform = cap_res["android.colorCorrection.transform"]
-    curves = [cap_res["android.tonemap.curveRed"],
-              cap_res["android.tonemap.curveGreen"],
-              cap_res["android.tonemap.curveBlue"]]
+    curves = [cap_res["android.tonemap.curve"]["red"],
+              cap_res["android.tonemap.curve"]["green"],
+              cap_res["android.tonemap.curve"]["blue"]]
     exp_time = cap_res['android.sensor.exposureTime']
-    lsc_map = cap_res["android.statistics.lensShadingMap"]
+    lsc_obj = cap_res["android.statistics.lensShadingCorrectionMap"]
+    lsc_map = lsc_obj["map"]
+    w_map = lsc_obj["width"]
+    h_map = lsc_obj["height"]
     ctrl_mode = cap_res["android.control.mode"]
 
     print "Control mode:", ctrl_mode
diff --git a/apps/CameraITS/tests/scene1/test_crop_region_raw.py b/apps/CameraITS/tests/scene1/test_crop_region_raw.py
index f2d788e..26cdc74 100644
--- a/apps/CameraITS/tests/scene1/test_crop_region_raw.py
+++ b/apps/CameraITS/tests/scene1/test_crop_region_raw.py
@@ -12,30 +12,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import its.image
+import os.path
 import its.caps
 import its.device
+import its.image
 import its.objects
 import its.target
 import numpy
-import os.path
+
+CROP_FULL_ERROR_THRESHOLD = 3  # pixels
+CROP_REGION_ERROR_THRESHOLD = 0.01  # reltol
+DIFF_THRESH = 0.05  # reltol
+NAME = os.path.basename(__file__).split(".")[0]
+
 
 def main():
-    """Test that raw streams are not croppable.
-    """
-    NAME = os.path.basename(__file__).split(".")[0]
-
-    DIFF_THRESH = 0.05
-    CROP_REGION_ERROR_THRESHOLD = 0.01
+    """Test that raw streams are not croppable."""
 
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.compute_target_exposure(props) and
                              its.caps.raw16(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
 
         # Calculate the active sensor region for a full (non-cropped) image.
-        a = props['android.sensor.info.activeArraySize']
+        a = props["android.sensor.info.activeArraySize"]
         ax, ay = a["left"], a["top"]
         aw, ah = a["right"] - a["left"], a["bottom"] - a["top"]
         print "Active sensor region: (%d,%d %dx%d)" % (ax, ay, aw, ah)
@@ -78,10 +80,10 @@
         # need to perfectly match the one that was requested.
         imgs = {}
         for s, cap, cr_expected, err_delta in [
-                ("yuv_full",cap1_yuv,full_region,0),
-                ("raw_full",cap1_raw,full_region,0),
-                ("yuv_crop",cap2_yuv,crop_region,CROP_REGION_ERROR_THRESHOLD),
-                ("raw_crop",cap2_raw,crop_region,CROP_REGION_ERROR_THRESHOLD)]:
+                ("yuv_full", cap1_yuv, full_region, CROP_FULL_ERROR_THRESHOLD),
+                ("raw_full", cap1_raw, full_region, CROP_FULL_ERROR_THRESHOLD),
+                ("yuv_crop", cap2_yuv, crop_region, CROP_REGION_ERROR_THRESHOLD),
+                ("raw_crop", cap2_raw, crop_region, CROP_REGION_ERROR_THRESHOLD)]:
 
             # Convert the capture to RGB and dump to a file.
             img = its.image.convert_capture_to_rgb_image(cap, props=props)
diff --git a/apps/CameraITS/tests/scene1/test_dng_noise_model.py b/apps/CameraITS/tests/scene1/test_dng_noise_model.py
index 538e786..c447ae5 100644
--- a/apps/CameraITS/tests/scene1/test_dng_noise_model.py
+++ b/apps/CameraITS/tests/scene1/test_dng_noise_model.py
@@ -45,15 +45,16 @@
                              its.caps.raw16(props) and
                              its.caps.manual_sensor(props) and
                              its.caps.read_3a(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
         debug = its.caps.debug_mode()
 
         white_level = float(props['android.sensor.info.whiteLevel'])
         cfa_idxs = its.image.get_canonical_cfa_order(props)
-        aax = props['android.sensor.info.activeArraySize']['left']
-        aay = props['android.sensor.info.activeArraySize']['top']
-        aaw = props['android.sensor.info.activeArraySize']['right']-aax
-        aah = props['android.sensor.info.activeArraySize']['bottom']-aay
+        aax = props['android.sensor.info.preCorrectionActiveArraySize']['left']
+        aay = props['android.sensor.info.preCorrectionActiveArraySize']['top']
+        aaw = props['android.sensor.info.preCorrectionActiveArraySize']['right']-aax
+        aah = props['android.sensor.info.preCorrectionActiveArraySize']['bottom']-aay
 
         # Expose for the scene with min sensitivity
         sens_min, sens_max = props['android.sensor.info.sensitivityRange']
diff --git a/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py b/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
index a3605f6..d087ab1 100644
--- a/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
+++ b/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
@@ -41,6 +41,7 @@
                              its.caps.per_frame_control(props) and
                              its.caps.ev_compensation(props))
 
+        mono_camera = its.caps.mono_camera(props)
         debug = its.caps.debug_mode()
         largest_yuv = its.objects.get_largest_yuv_format(props)
         if debug:
@@ -63,20 +64,21 @@
         # Converge 3A, and lock AE once converged. skip AF trigger as
         # dark/bright scene could make AF convergence fail and this test
         # doesn't care the image sharpness.
-        cam.do_3a(ev_comp=0, lock_ae=True, do_af=False)
+        cam.do_3a(ev_comp=0, lock_ae=True, do_af=False, mono_camera=mono_camera)
 
         for ev in ev_steps:
 
             # Capture a single shot with the same EV comp and locked AE.
             req = its.objects.auto_capture_request()
             req['android.control.aeExposureCompensation'] = ev
-            req["android.control.aeLock"] = True
+            req['android.control.aeLock'] = True
             # Use linear tone curve to avoid brightness being impacted
             # by tone curves.
-            req["android.tonemap.mode"] = 0
-            req["android.tonemap.curveRed"] = [0.0,0.0, 1.0,1.0]
-            req["android.tonemap.curveGreen"] = [0.0,0.0, 1.0,1.0]
-            req["android.tonemap.curveBlue"] = [0.0,0.0, 1.0,1.0]
+            req['android.tonemap.mode'] = 0
+            req['android.tonemap.curve'] = {
+                'red': [0.0,0.0, 1.0,1.0],
+                'green': [0.0,0.0, 1.0,1.0],
+                'blue': [0.0,0.0, 1.0,1.0]}
             caps = cam.do_capture([req]*THREASH_CONVERGE_FOR_EV, fmt)
 
             for cap in caps:
diff --git a/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py b/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
index 4473bc7..32e5001 100644
--- a/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
+++ b/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
@@ -40,6 +40,7 @@
                              its.caps.ae_lock(props))
 
         debug = its.caps.debug_mode()
+        mono_camera = its.caps.mono_camera(props)
         largest_yuv = its.objects.get_largest_yuv_format(props)
         if debug:
             fmt = largest_yuv
@@ -59,7 +60,7 @@
         # Converge 3A, and lock AE once converged. skip AF trigger as
         # dark/bright scene could make AF convergence fail and this test
         # doesn't care the image sharpness.
-        cam.do_3a(ev_comp=0, lock_ae=True, do_af=False)
+        cam.do_3a(ev_comp=0, lock_ae=True, do_af=False, mono_camera=mono_camera)
 
         for ev in evs:
             # Capture a single shot with the same EV comp and locked AE.
diff --git a/apps/CameraITS/tests/scene1/test_exposure.py b/apps/CameraITS/tests/scene1/test_exposure.py
index ec41bcd..cac49d0 100644
--- a/apps/CameraITS/tests/scene1/test_exposure.py
+++ b/apps/CameraITS/tests/scene1/test_exposure.py
@@ -30,16 +30,18 @@
 THRESHOLD_MAX_LEVEL = 0.9
 THRESHOLD_MAX_LEVEL_DIFF = 0.045
 THRESHOLD_MAX_LEVEL_DIFF_WIDE_RANGE = 0.06
-THRESHOLD_ROUND_DOWN_GAIN = 0.1
-THRESHOLD_ROUND_DOWN_EXP = 0.05
+THRESH_ROUND_DOWN_GAIN = 0.1
+THRESH_ROUND_DOWN_EXP = 0.03
+THRESH_ROUND_DOWN_EXP0 = 1.00  # tol at 0ms exp; theoretical limit @ 4-line exp
+THRESH_EXP_KNEE = 6E6  # exposures less than knee have relaxed tol
 
 
 def get_raw_active_array_size(props):
     """Return the active array w, h from props."""
-    aaw = (props['android.sensor.info.activeArraySize']['right'] -
-           props['android.sensor.info.activeArraySize']['left'])
-    aah = (props['android.sensor.info.activeArraySize']['bottom'] -
-           props['android.sensor.info.activeArraySize']['top'])
+    aaw = (props['android.sensor.info.preCorrectionActiveArraySize']['right'] -
+           props['android.sensor.info.preCorrectionActiveArraySize']['left'])
+    aah = (props['android.sensor.info.preCorrectionActiveArraySize']['bottom'] -
+           props['android.sensor.info.preCorrectionActiveArraySize']['top'])
     return aaw, aah
 
 
@@ -65,9 +67,7 @@
         its.caps.skip_unless(its.caps.compute_target_exposure(props) and
                              its.caps.per_frame_control(props))
 
-        process_raw = (its.caps.compute_target_exposure(props) and
-                       its.caps.per_frame_control(props) and
-                       its.caps.raw16(props) and
+        process_raw = (its.caps.raw16(props) and
                        its.caps.manual_sensor(props))
 
         debug = its.caps.debug_mode()
@@ -90,15 +90,27 @@
             e_test = s_e_product / s_test
             print 'Testing s:', s_test, 'e:', e_test
             req = its.objects.manual_capture_request(
-                s_test, e_test, 0.0, True, props)
+                    s_test, e_test, 0.0, True, props)
             cap = cam.do_capture(req, fmt)
             s_res = cap['metadata']['android.sensor.sensitivity']
             e_res = cap['metadata']['android.sensor.exposureTime']
-            assert 0 <= s_test - s_res < s_test * THRESHOLD_ROUND_DOWN_GAIN
-            assert 0 <= e_test - e_res < e_test * THRESHOLD_ROUND_DOWN_EXP
+            # determine exposure tolerance based on exposure time
+            if e_test >= THRESH_EXP_KNEE:
+                thresh_round_down_exp = THRESH_ROUND_DOWN_EXP
+            else:
+                thresh_round_down_exp = (
+                        THRESH_ROUND_DOWN_EXP +
+                        (THRESH_ROUND_DOWN_EXP0 - THRESH_ROUND_DOWN_EXP) *
+                        (THRESH_EXP_KNEE - e_test) / THRESH_EXP_KNEE)
+            s_msg = 's_write: %d, s_read: %d, TOL=%.f%%' % (
+                    s_test, s_res, THRESH_ROUND_DOWN_GAIN*100)
+            e_msg = 'e_write: %.2fms, e_read: %.2fms, TOL=%.f%%' % (
+                    e_test/1.0E6, e_res/1.0E6, thresh_round_down_exp*100)
+            assert 0 <= s_test - s_res < s_test * THRESH_ROUND_DOWN_GAIN, s_msg
+            assert 0 <= e_test - e_res < e_test * thresh_round_down_exp, e_msg
             s_e_product_res = s_res * e_res
             request_result_ratio = s_e_product / s_e_product_res
-            print 'Capture result s:', s_test, 'e:', e_test
+            print 'Capture result s:', s_res, 'e:', e_res
             img = its.image.convert_capture_to_rgb_image(cap)
             its.image.write_image(img, '%s_mult=%3.2f.jpg' % (NAME, m))
             tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
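For reference, the exposure check above now relaxes the round-down tolerance linearly for exposures below THRESH_EXP_KNEE (6 ms): from THRESH_ROUND_DOWN_EXP0 (100%) at 0 ms down to THRESH_ROUND_DOWN_EXP (3%) at the knee and above. A quick worked check of that interpolation, with the constants copied from the hunk:

    THRESH_ROUND_DOWN_EXP = 0.03
    THRESH_ROUND_DOWN_EXP0 = 1.00
    THRESH_EXP_KNEE = 6E6  # ns

    def exp_tolerance(e_test):
        # Same piecewise-linear rule as the hunk above.
        if e_test >= THRESH_EXP_KNEE:
            return THRESH_ROUND_DOWN_EXP
        return (THRESH_ROUND_DOWN_EXP +
                (THRESH_ROUND_DOWN_EXP0 - THRESH_ROUND_DOWN_EXP) *
                (THRESH_EXP_KNEE - e_test) / THRESH_EXP_KNEE)

    assert abs(exp_tolerance(3E6) - 0.515) < 1e-9  # halfway to the knee: 51.5%
    assert exp_tolerance(6E6) == 0.03              # at/above the knee: 3%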
diff --git a/apps/CameraITS/tests/scene1/test_linearity.py b/apps/CameraITS/tests/scene1/test_linearity.py
index 35068a8..1f4aa14 100644
--- a/apps/CameraITS/tests/scene1/test_linearity.py
+++ b/apps/CameraITS/tests/scene1/test_linearity.py
@@ -67,9 +67,10 @@
         req = its.objects.manual_capture_request(0, e)
         req['android.blackLevel.lock'] = True
         req['android.tonemap.mode'] = 0
-        req['android.tonemap.curveRed'] = gamma_lut.tolist()
-        req['android.tonemap.curveGreen'] = gamma_lut.tolist()
-        req['android.tonemap.curveBlue'] = gamma_lut.tolist()
+        req['android.tonemap.curve'] = {
+            'red': gamma_lut.tolist(),
+            'green': gamma_lut.tolist(),
+            'blue': gamma_lut.tolist()}
 
         r_means = []
         g_means = []
diff --git a/apps/CameraITS/tests/scene1/test_locked_burst.py b/apps/CameraITS/tests/scene1/test_locked_burst.py
index 47a0186..befbbed 100644
--- a/apps/CameraITS/tests/scene1/test_locked_burst.py
+++ b/apps/CameraITS/tests/scene1/test_locked_burst.py
@@ -40,9 +40,11 @@
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.ae_lock(props) and
                              its.caps.awb_lock(props))
+        mono_camera = its.caps.mono_camera(props)
 
         # Converge 3A prior to capture.
-        cam.do_3a(do_af=True, lock_ae=True, lock_awb=True)
+        cam.do_3a(do_af=True, lock_ae=True, lock_awb=True,
+                  mono_camera=mono_camera)
 
         fmt = its.objects.get_largest_yuv_format(props)
 
diff --git a/apps/CameraITS/tests/scene1/test_multi_camera_match.py b/apps/CameraITS/tests/scene1/test_multi_camera_match.py
new file mode 100644
index 0000000..b5bd63c
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_multi_camera_match.py
@@ -0,0 +1,95 @@
+# Copyright 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+
+import its.caps
+import its.device
+import its.image
+import its.objects
+import its.target
+
+import numpy as np
+NAME = os.path.basename(__file__).split('.')[0]
+PATCH_SIZE = 0.0625  # 1/16 x 1/16 in center of image
+PATCH_LOC = (1-PATCH_SIZE)/2
+THRESH_DIFF = 0.06
+THRESH_GAIN = 0.1
+THRESH_EXP = 0.05
+
+
+def main():
+    """Test both cameras give similar RBG values for gray patch."""
+
+    yuv_sizes = {}
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props) and
+                             its.caps.logical_multi_camera(props) and
+                             its.caps.raw16(props) and
+                             its.caps.manual_sensor(props))
+        ids = its.caps.logical_multi_camera_physical_ids(props)
+        max_raw_size = its.objects.get_available_output_sizes('raw', props)[0]
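+        # For each physical camera, collect YUV sizes that match the RAW
+        # aspect ratio, then intersect across cameras for a common size.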
+        for i in ids:
+            physical_props = cam.get_camera_properties_by_id(i)
+            its.caps.skip_unless(not its.caps.mono_camera(physical_props))
+            yuv_sizes[i] = its.objects.get_available_output_sizes(
+                    'yuv', physical_props, match_ar_size=max_raw_size)
+            if i == ids[0]:
+                yuv_match_sizes = yuv_sizes[i]
+            else:
+                yuv_match_sizes = list(
+                        set(yuv_sizes[i]).intersection(yuv_match_sizes))
+
+        # find matched size for captures
+        yuv_match_sizes.sort()
+        w = yuv_match_sizes[-1][0]
+        h = yuv_match_sizes[-1][1]
+        print 'Matched YUV size: (%d, %d)' % (w, h)
+
+        # do 3a and create requests
+        avail_fls = props['android.lens.info.availableFocalLengths']
+        cam.do_3a()
+        reqs = []
+        for i, fl in enumerate(avail_fls):
+            reqs.append(its.objects.auto_capture_request())
+            reqs[i]['android.lens.focalLength'] = fl
+
+        # capture YUVs
+        y_means = {}
+        msg = ''
+        fmt = [{'format': 'yuv', 'width': w, 'height': h}]
+        caps = cam.do_capture(reqs, fmt)
+        if not isinstance(caps, list):
+            caps = [caps]  # handle case where a single capture is returned
+
+        for i, fl in enumerate(avail_fls):
+            img = its.image.convert_capture_to_rgb_image(caps[i], props=props)
+            its.image.write_image(img, '%s_yuv_fl=%s.jpg' % (NAME, fl))
+            y, _, _ = its.image.convert_capture_to_planes(caps[i], props=props)
+            y_mean = its.image.compute_image_means(
+                    its.image.get_image_patch(y, PATCH_LOC, PATCH_LOC,
+                                              PATCH_SIZE, PATCH_SIZE))[0]
+            print 'y[%s]: %.3f' % (fl, y_mean)
+            msg += 'y[%s]: %.3f, ' % (fl, y_mean)
+            y_means[fl] = y_mean
+
+        # compare YUVs
+        msg += 'TOL=%.5f' % THRESH_DIFF
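+        # patch luma must match across all focal lengths within THRESH_DIFF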
+        assert np.isclose(max(y_means.values()), min(y_means.values()),
+                          rtol=THRESH_DIFF), msg
+
+
+if __name__ == '__main__':
+    main()
diff --git a/apps/CameraITS/tests/scene1/test_param_color_correction.py b/apps/CameraITS/tests/scene1/test_param_color_correction.py
index 3dac3f5..83f4f7f 100644
--- a/apps/CameraITS/tests/scene1/test_param_color_correction.py
+++ b/apps/CameraITS/tests/scene1/test_param_color_correction.py
@@ -38,7 +38,8 @@
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.compute_target_exposure(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
 
         # Baseline request
         debug = its.caps.debug_mode()
diff --git a/apps/CameraITS/tests/scene1/test_param_flash_mode.py b/apps/CameraITS/tests/scene1/test_param_flash_mode.py
index 2d8c678..cfd88d4 100644
--- a/apps/CameraITS/tests/scene1/test_param_flash_mode.py
+++ b/apps/CameraITS/tests/scene1/test_param_flash_mode.py
@@ -12,17 +12,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import its.image
+import os.path
 import its.caps
 import its.device
+import its.image
 import its.objects
 import its.target
-import os.path
+
+NAME = os.path.basename(__file__).split('.')[0]
+GRADIENT_DELTA = 0.1
+Y_RELATIVE_DELTA_FLASH = 0.1  # 10%
+Y_RELATIVE_DELTA_TORCH = 0.05  # 5%
+
 
 def main():
-    """Test that the android.flash.mode parameter is applied.
-    """
-    NAME = os.path.basename(__file__).split(".")[0]
+    """Test that the android.flash.mode parameter is applied."""
 
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
@@ -32,7 +36,8 @@
 
         flash_modes_reported = []
         flash_states_reported = []
-        g_means = []
+        means = []
+        grads = []
 
         # Manually set the exposure to be a little on the dark side, so that
         # it should be obvious whether the flash fired or not, and use a
@@ -45,29 +50,33 @@
             match_ar = (largest_yuv['width'], largest_yuv['height'])
             fmt = its.objects.get_smallest_yuv_format(props, match_ar=match_ar)
 
-        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
-        e /= 4
+        e, s = its.target.get_target_exposure_combos(cam)['midExposureTime']
+        e /= 2
         req = its.objects.manual_capture_request(s, e, 0.0, True, props)
 
-        for f in [0,1,2]:
-            req["android.flash.mode"] = f
+        for f in [0, 1, 2]:
+            req['android.flash.mode'] = f
             cap = cam.do_capture(req, fmt)
-            flash_modes_reported.append(cap["metadata"]["android.flash.mode"])
-            flash_states_reported.append(cap["metadata"]["android.flash.state"])
-            img = its.image.convert_capture_to_rgb_image(cap)
-            its.image.write_image(img, "%s_mode=%d.jpg" % (NAME, f))
-            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
-            rgb = its.image.compute_image_means(tile)
-            g_means.append(rgb[1])
+            flash_modes_reported.append(cap['metadata']['android.flash.mode'])
+            flash_states_reported.append(cap['metadata']['android.flash.state'])
+            y, _, _ = its.image.convert_capture_to_planes(cap, props)
+            its.image.write_image(y, '%s_%d.jpg' % (NAME, f))
+            tile = its.image.get_image_patch(y, 0.375, 0.375, 0.25, 0.25)
+            its.image.write_image(tile, '%s_%d_tile.jpg' % (NAME, f))
+            means.append(its.image.compute_image_means(tile)[0])
+            grads.append(its.image.compute_image_max_gradients(tile)[0])
 
-        assert(flash_modes_reported == [0,1,2])
-        assert(flash_states_reported[0] not in [3,4])
-        assert(flash_states_reported[1] in [3,4])
-        assert(flash_states_reported[2] in [3,4])
+        assert flash_modes_reported == [0, 1, 2]
+        assert flash_states_reported[0] not in [3, 4]
+        assert flash_states_reported[1] in [3, 4]
+        assert flash_states_reported[2] in [3, 4]
 
-        print "G brightnesses:", g_means
-        assert(g_means[1] > g_means[0])
-        assert(g_means[2] > g_means[0])
+        print 'Brightnesses:', means
+        print 'Max gradients: ', grads
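+        # flash/torch frames must either add edge content (max gradient) or
+        # brighten the center patch relative to the no-flash frame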
+        assert (grads[1]-grads[0] > GRADIENT_DELTA or
+                (means[1]-means[0]) / means[0] > Y_RELATIVE_DELTA_FLASH)
+        assert (grads[2]-grads[0] > GRADIENT_DELTA or
+                (means[2]-means[0]) / means[0] > Y_RELATIVE_DELTA_TORCH)
 
 if __name__ == '__main__':
     main()
diff --git a/apps/CameraITS/tests/scene1/test_param_shading_mode.py b/apps/CameraITS/tests/scene1/test_param_shading_mode.py
index f4c2b99..45c9a12 100644
--- a/apps/CameraITS/tests/scene1/test_param_shading_mode.py
+++ b/apps/CameraITS/tests/scene1/test_param_shading_mode.py
@@ -40,35 +40,38 @@
                              its.caps.lsc_map(props) and
                              its.caps.lsc_off(props))
 
-        assert(props.has_key("android.lens.info.shadingMapSize") and
-               props["android.lens.info.shadingMapSize"] != None)
+        mono_camera = its.caps.mono_camera(props)
 
         # lsc_off devices should always support OFF(0), FAST(1), and HQ(2)
         assert(props.has_key("android.shading.availableModes") and
                set(props["android.shading.availableModes"]) == set([0, 1, 2]))
 
-        num_map_gains = props["android.lens.info.shadingMapSize"]["width"] * \
-                        props["android.lens.info.shadingMapSize"]["height"] * 4
-
         # Test 1: Switching shading modes several times and verify:
         #   1. Lens shading maps with mode OFF are all 1.0
         #   2. Lens shading maps with mode FAST are similar after switching
         #      shading modes.
         #   3. Lens shading maps with mode HIGH_QUALITY are similar after
         #      switching shading modes.
-        cam.do_3a();
+        cam.do_3a(mono_camera=mono_camera);
 
         # Get the reference lens shading maps for OFF, FAST, and HIGH_QUALITY
         # in different sessions.
         # reference_maps[mode]
         reference_maps = [[] for mode in range(3)]
-        reference_maps[0] = [1.0] * num_map_gains
+        num_map_gains = 0
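+        # the map dimensions are only known from a capture result, so the
+        # all-1.0 OFF reference map is sized from the first FAST-mode map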
         for mode in range(1, 3):
             req = its.objects.auto_capture_request();
             req["android.statistics.lensShadingMapMode"] = 1
             req["android.shading.mode"] = mode
-            reference_maps[mode] = cam.do_capture(req)["metadata"] \
-                    ["android.statistics.lensShadingMap"]
+            cap_res = cam.do_capture(req)["metadata"]
+            lsc_map = cap_res["android.statistics.lensShadingCorrectionMap"]
+            assert(lsc_map.has_key("width") and
+                   lsc_map.has_key("height") and
+                   lsc_map["width"] != None and lsc_map["height"] != None)
+            if mode == 1:
+                num_map_gains = lsc_map["width"] * lsc_map["height"] * 4
+                reference_maps[0] = [1.0] * num_map_gains
+            reference_maps[mode] = lsc_map["map"]
 
         # Get the lens shading maps while switching modes in one session.
         reqs = []
@@ -88,7 +91,8 @@
         # Get the shading maps out of capture results
         for i in range(len(caps)):
             shading_maps[i % 3][i / 3] = \
-                    caps[i]["metadata"]["android.statistics.lensShadingMap"]
+                    caps[i]["metadata"] \
+                    ["android.statistics.lensShadingCorrectionMap"]["map"]
 
         # Draw the maps
         for mode in range(3):
diff --git a/apps/CameraITS/tests/scene1/test_param_tonemap_mode.py b/apps/CameraITS/tests/scene1/test_param_tonemap_mode.py
index 1229f90..45a5b13 100644
--- a/apps/CameraITS/tests/scene1/test_param_tonemap_mode.py
+++ b/apps/CameraITS/tests/scene1/test_param_tonemap_mode.py
@@ -62,12 +62,13 @@
         for n in [0,1]:
             req = its.objects.manual_capture_request(s,e)
             req["android.tonemap.mode"] = 0
-            req["android.tonemap.curveRed"] = (
-                    sum([[i/LM1, min(1.0,(1+0.5*n)*i/LM1)] for i in range(L)], []))
-            req["android.tonemap.curveGreen"] = (
-                    sum([[i/LM1, min(1.0,(1+1.0*n)*i/LM1)] for i in range(L)], []))
-            req["android.tonemap.curveBlue"] = (
-                    sum([[i/LM1, min(1.0,(1+1.5*n)*i/LM1)] for i in range(L)], []))
+            req["android.tonemap.curve"] = {
+                "red": (sum([[i/LM1, min(1.0,(1+0.5*n)*i/LM1)]
+                    for i in range(L)], [])),
+                "green": (sum([[i/LM1, min(1.0,(1+1.0*n)*i/LM1)]
+                    for i in range(L)], [])),
+                "blue": (sum([[i/LM1, min(1.0,(1+1.5*n)*i/LM1)]
+                    for i in range(L)], []))}
             cap = cam.do_capture(req, fmt)
             img = its.image.convert_capture_to_rgb_image(cap)
             its.image.write_image(
@@ -90,9 +91,8 @@
             curve = sum([[i/m, i/m] for i in range(size)], [])
             req = its.objects.manual_capture_request(s,e)
             req["android.tonemap.mode"] = 0
-            req["android.tonemap.curveRed"] = curve
-            req["android.tonemap.curveGreen"] = curve
-            req["android.tonemap.curveBlue"] = curve
+            req["android.tonemap.curve"] = {
+                "red": curve, "green": curve, "blue": curve}
             cap = cam.do_capture(req)
             img = its.image.convert_capture_to_rgb_image(cap)
             its.image.write_image(
diff --git a/apps/CameraITS/tests/scene1/test_post_raw_sensitivity_boost.py b/apps/CameraITS/tests/scene1/test_post_raw_sensitivity_boost.py
index 70b1927..73c001d 100644
--- a/apps/CameraITS/tests/scene1/test_post_raw_sensitivity_boost.py
+++ b/apps/CameraITS/tests/scene1/test_post_raw_sensitivity_boost.py
@@ -40,7 +40,8 @@
         its.caps.skip_unless(its.caps.raw_output(props) and
                              its.caps.post_raw_sensitivity_boost(props) and
                              its.caps.compute_target_exposure(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
 
         w,h = its.objects.get_available_output_sizes(
                 "yuv", props, (1920, 1080))[0]
diff --git a/apps/CameraITS/tests/scene1/test_raw_burst_sensitivity.py b/apps/CameraITS/tests/scene1/test_raw_burst_sensitivity.py
index 053f7ca..b6b0514 100644
--- a/apps/CameraITS/tests/scene1/test_raw_burst_sensitivity.py
+++ b/apps/CameraITS/tests/scene1/test_raw_burst_sensitivity.py
@@ -39,7 +39,8 @@
         its.caps.skip_unless(its.caps.raw16(props) and
                              its.caps.manual_sensor(props) and
                              its.caps.read_3a(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
         debug = its.caps.debug_mode()
 
         # Expose for the scene with min sensitivity
@@ -62,10 +63,10 @@
             caps = cam.do_capture(reqs, cam.CAP_RAW)
         else:
             # Get the active array width and height.
-            aax = props["android.sensor.info.activeArraySize"]["left"]
-            aay = props["android.sensor.info.activeArraySize"]["top"]
-            aaw = props["android.sensor.info.activeArraySize"]["right"]-aax
-            aah = props["android.sensor.info.activeArraySize"]["bottom"]-aay
+            aax = props["android.sensor.info.preCorrectionActiveArraySize"]["left"]
+            aay = props["android.sensor.info.preCorrectionActiveArraySize"]["top"]
+            aaw = props["android.sensor.info.preCorrectionActiveArraySize"]["right"]-aax
+            aah = props["android.sensor.info.preCorrectionActiveArraySize"]["bottom"]-aay
             # Compute stats on a grid across each image.
             caps = cam.do_capture(reqs,
                                   {"format": "rawStats",
diff --git a/apps/CameraITS/tests/scene1/test_raw_exposure.py b/apps/CameraITS/tests/scene1/test_raw_exposure.py
new file mode 100644
index 0000000..ca59aa8
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_raw_exposure.py
@@ -0,0 +1,162 @@
+# Copyright 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import its.caps
+import its.device
+import its.image
+import its.objects
+from matplotlib import pylab
+import matplotlib.pyplot
+import numpy as np
+
+IMG_STATS_GRID = 9  # used to find the center 11.11% of the image
+NAME = os.path.basename(__file__).split(".")[0]
+NUM_ISO_STEPS = 5
+SATURATION_TOL = 0.01
+BLK_LVL_TOL = 0.1
+# Test 3 steps per 2x exposure
+EXP_MULT = pow(2, 1.0/3)
+INCREASING_THR = 0.99
+# slice captures into burst of SLICE_LEN requests
+SLICE_LEN = 10
+
+
+def main():
+    """Capture a set of raw images with increasing exposure time and measure the pixel values.
+    """
+
+    with its.device.ItsSession() as cam:
+
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.raw16(props) and
+                             its.caps.manual_sensor(props) and
+                             its.caps.read_3a(props) and
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
+        debug = its.caps.debug_mode()
+
+        # Expose for the scene with min sensitivity
+        exp_min, exp_max = props["android.sensor.info.exposureTimeRange"]
+        sens_min, _ = props["android.sensor.info.sensitivityRange"]
+        # Digital gains might not be visible on RAW data
+        sens_max = props["android.sensor.maxAnalogSensitivity"]
+        sens_step = (sens_max - sens_min) / NUM_ISO_STEPS
+        white_level = float(props["android.sensor.info.whiteLevel"])
+        black_levels = [its.image.get_black_level(i, props) for i in range(4)]
+        # Get the active array width and height.
+        aax = props["android.sensor.info.preCorrectionActiveArraySize"]["left"]
+        aay = props["android.sensor.info.preCorrectionActiveArraySize"]["top"]
+        aaw = props["android.sensor.info.preCorrectionActiveArraySize"]["right"]-aax
+        aah = props["android.sensor.info.preCorrectionActiveArraySize"]["bottom"]-aay
+        raw_stat_fmt = {"format": "rawStats",
+                        "gridWidth": aaw/IMG_STATS_GRID,
+                        "gridHeight": aah/IMG_STATS_GRID}
+
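+        # build the exposure sweep: grow from exp_min by EXP_MULT (2^(1/3))
+        # per step, and append exp_max if the sweep ends short of it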
+        e_test = []
+        mult = 1.0
+        while exp_min*mult < exp_max:
+            e_test.append(int(exp_min*mult))
+            mult *= EXP_MULT
+        if e_test[-1] < exp_max * INCREASING_THR:
+            e_test.append(int(exp_max))
+        e_test_ms = [e / 1000000.0 for e in e_test]
+
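+        # step analog sensitivity from min to max in NUM_ISO_STEPS increments
+        # and repeat the full exposure sweep at each ISO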
+        for s in range(sens_min, sens_max, sens_step):
+            means = []
+            means.append(black_levels)
+            reqs = [its.objects.manual_capture_request(s, e, 0) for e in e_test]
+            # Capture raw in debug mode, rawStats otherwise
+            caps = []
+            for i in range(len(reqs) / SLICE_LEN):
+                if debug:
+                    caps += cam.do_capture(reqs[i*SLICE_LEN:(i+1)*SLICE_LEN], cam.CAP_RAW)
+                else:
+                    caps += cam.do_capture(reqs[i*SLICE_LEN:(i+1)*SLICE_LEN], raw_stat_fmt)
+            last_n = len(reqs) % SLICE_LEN
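+            # a burst of one request returns a single capture, not a list,
+            # so wrap it before appending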
+            if last_n == 1:
+                if debug:
+                    caps += [cam.do_capture(reqs[-last_n:], cam.CAP_RAW)]
+                else:
+                    caps += [cam.do_capture(reqs[-last_n:], raw_stat_fmt)]
+            elif last_n > 0:
+                if debug:
+                    caps += cam.do_capture(reqs[-last_n:], cam.CAP_RAW)
+                else:
+                    caps += cam.do_capture(reqs[-last_n:], raw_stat_fmt)
+
+            # Measure the mean of each channel.
+            # Each shot should be brighter (except underexposed/overexposed scene)
+            for i, cap in enumerate(caps):
+                if debug:
+                    planes = its.image.convert_capture_to_planes(cap, props)
+                    tiles = [its.image.get_image_patch(p, 0.445, 0.445, 0.11, 0.11) for p in planes]
+                    mean = [m * white_level for tile in tiles
+                            for m in its.image.compute_image_means(tile)]
+                    img = its.image.convert_capture_to_rgb_image(cap, props=props)
+                    its.image.write_image(img, "%s_s=%d_e=%05d.jpg"
+                                          % (NAME, s, e_test[i]))
+                else:
+                    mean_image, _ = its.image.unpack_rawstats_capture(cap)
+                    mean = mean_image[IMG_STATS_GRID/2, IMG_STATS_GRID/2]
+
+                print "ISO=%d, exposure time=%.3fms, mean=%s" % (
+                        s, e_test[i] / 1000000.0, str(mean))
+                means.append(mean)
+
+            # means[0] is black level value
+            r = [m[0] for m in means[1:]]
+            gr = [m[1] for m in means[1:]]
+            gb = [m[2] for m in means[1:]]
+            b = [m[3] for m in means[1:]]
+
+            pylab.plot(e_test_ms, r, "r.-")
+            pylab.plot(e_test_ms, b, "b.-")
+            pylab.plot(e_test_ms, gr, "g.-")
+            pylab.plot(e_test_ms, gb, "k.-")
+            pylab.xscale("log")
+            pylab.yscale("log")
+            pylab.title("%s ISO=%d" % (NAME, s))
+            pylab.xlabel("Exposure time (ms)")
+            pylab.ylabel("Center patch pixel mean")
+            matplotlib.pyplot.savefig("%s_s=%d.png" % (NAME, s))
+            pylab.clf()
+
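+            # skip frames still at black level, stop once a channel
+            # saturates, and require each channel mean to keep increasing
+            # (within INCREASING_THR) in between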
+            allow_under_saturated = True
+            for i in xrange(1, len(means)):
+                prev_mean = means[i-1]
+                mean = means[i]
+
+                if np.isclose(max(mean), white_level, rtol=SATURATION_TOL):
+                    print "Saturated: white_level %f, max_mean %f"% (white_level, max(mean))
+                    break
+
+                if allow_under_saturated and np.allclose(mean, black_levels, rtol=BLK_LVL_TOL):
+                    # All channel means are close to black level
+                    continue
+
+                allow_under_saturated = False
+                # Check pixel means are increasing (with small tolerance)
+                channels = ["Red", "Gr", "Gb", "Blue"]
+                for chan in range(4):
+                    err_msg = "ISO=%d, %s, exptime %3fms mean: %.2f, %s mean: %.2f, TOL=%.f%%" % (
+                            s, channels[chan],
+                            e_test_ms[i-1], mean[chan],
+                            "black level" if i == 1 else "exptime %3fms"%e_test_ms[i-2],
+                            prev_mean[chan],
+                            INCREASING_THR*100)
+                    assert mean[chan] > prev_mean[chan] * INCREASING_THR, err_msg
+
+if __name__ == "__main__":
+    main()
diff --git a/apps/CameraITS/tests/scene1/test_raw_sensitivity.py b/apps/CameraITS/tests/scene1/test_raw_sensitivity.py
index 6dac206..db69e36 100644
--- a/apps/CameraITS/tests/scene1/test_raw_sensitivity.py
+++ b/apps/CameraITS/tests/scene1/test_raw_sensitivity.py
@@ -37,7 +37,8 @@
         its.caps.skip_unless(its.caps.raw16(props) and
                              its.caps.manual_sensor(props) and
                              its.caps.read_3a(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
         debug = its.caps.debug_mode()
 
         # Expose for the scene with min sensitivity
@@ -67,10 +68,10 @@
                                       (NAME, s, var))
             else:
                 # Get the active array width and height.
-                aax = props["android.sensor.info.activeArraySize"]["left"]
-                aay = props["android.sensor.info.activeArraySize"]["top"]
-                aaw = props["android.sensor.info.activeArraySize"]["right"]-aax
-                aah = props["android.sensor.info.activeArraySize"]["bottom"]-aay
+                aax = props["android.sensor.info.preCorrectionActiveArraySize"]["left"]
+                aay = props["android.sensor.info.preCorrectionActiveArraySize"]["top"]
+                aaw = props["android.sensor.info.preCorrectionActiveArraySize"]["right"]-aax
+                aah = props["android.sensor.info.preCorrectionActiveArraySize"]["bottom"]-aay
                 white_level = float(props["android.sensor.info.whiteLevel"])
                 cap = cam.do_capture(req,
                                      {"format": "rawStats",
diff --git a/apps/CameraITS/tests/scene1/test_tonemap_sequence.py b/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
index 2b5f094..a7b9f6d 100644
--- a/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
+++ b/apps/CameraITS/tests/scene1/test_tonemap_sequence.py
@@ -33,7 +33,8 @@
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.manual_sensor(props) and
                              its.caps.manual_post_proc(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
 
         debug = its.caps.debug_mode()
         largest_yuv = its.objects.get_largest_yuv_format(props)
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_dng.py b/apps/CameraITS/tests/scene1/test_yuv_plus_dng.py
index 268b64a..4a62120 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_dng.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_dng.py
@@ -27,8 +27,9 @@
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.raw(props) and
                              its.caps.read_3a(props))
+        mono_camera = its.caps.mono_camera(props)
 
-        cam.do_3a()
+        cam.do_3a(mono_camera=mono_camera)
 
         req = its.objects.auto_capture_request()
         max_dng_size = \
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
index dd7ef21..0c5b78b 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
@@ -31,7 +31,8 @@
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.compute_target_exposure(props) and
                              its.caps.raw16(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
 
         # Use a manual request with a linear tonemap so that the YUV and RAW
         # should look the same (once converted by the its.image module).
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
index 9c0c69b..6e700b7 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw10.py
@@ -31,7 +31,8 @@
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.compute_target_exposure(props) and
                              its.caps.raw10(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
 
         # Use a manual request with a linear tonemap so that the YUV and RAW
         # should look the same (once converted by the its.image module).
diff --git a/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py b/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py
index 8070785..cd284b7 100644
--- a/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py
+++ b/apps/CameraITS/tests/scene1/test_yuv_plus_raw12.py
@@ -31,7 +31,8 @@
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.compute_target_exposure(props) and
                              its.caps.raw12(props) and
-                             its.caps.per_frame_control(props))
+                             its.caps.per_frame_control(props) and
+                             not its.caps.mono_camera(props))
 
         # Use a manual request with a linear tonemap so that the YUV and RAW
         # should look the same (once converted by the its.image module).
diff --git a/apps/CameraITS/tests/scene2/scene2_0.67_scaled.pdf b/apps/CameraITS/tests/scene2/scene2_0.67_scaled.pdf
new file mode 100644
index 0000000..7b64817
--- /dev/null
+++ b/apps/CameraITS/tests/scene2/scene2_0.67_scaled.pdf
Binary files differ
diff --git a/apps/CameraITS/tests/scene2/test_faces.py b/apps/CameraITS/tests/scene2/test_faces.py
index 4e30fc1..16ec780 100644
--- a/apps/CameraITS/tests/scene2/test_faces.py
+++ b/apps/CameraITS/tests/scene2/test_faces.py
@@ -30,11 +30,14 @@
 
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.face_detect(props))
+        mono_camera = its.caps.mono_camera(props)
         fd_modes = props['android.statistics.info.availableFaceDetectModes']
         a = props['android.sensor.info.activeArraySize']
         aw, ah = a['right'] - a['left'], a['bottom'] - a['top']
         if its.caps.read_3a(props):
-            gain, exp, _, _, focus = cam.do_3a(get_results=True)
+            gain, exp, _, _, focus = cam.do_3a(get_results=True,
+                                               mono_camera=mono_camera)
             print 'iso = %d' % gain
             print 'exp = %.2fms' % (exp*1.0E-6)
             if focus == 0.0:
@@ -60,7 +63,7 @@
                 # but it should detect at least one face in last frame
                 if i == NUM_TEST_FRAMES - 1:
                     img = its.image.convert_capture_to_rgb_image(cap, props=props)
-                    img = its.image.flip_mirror_img_per_argv(img)
+                    img = its.image.rotate_img_per_argv(img)
                     img_name = "%s_fd_mode_%s.jpg" % (NAME, fd_mode)
                     its.image.write_image(img, img_name)
                     if len(faces) == 0:
diff --git a/apps/CameraITS/tests/scene2/test_num_faces.py b/apps/CameraITS/tests/scene2/test_num_faces.py
index 16e53ad..044c154 100644
--- a/apps/CameraITS/tests/scene2/test_num_faces.py
+++ b/apps/CameraITS/tests/scene2/test_num_faces.py
@@ -32,12 +32,15 @@
     """Test face detection."""
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.face_detect(props))
+        mono_camera = its.caps.mono_camera(props)
         fd_modes = props['android.statistics.info.availableFaceDetectModes']
         a = props['android.sensor.info.activeArraySize']
         aw, ah = a['right'] - a['left'], a['bottom'] - a['top']
 
         if its.caps.read_3a(props):
-            _, _, _, _, _ = cam.do_3a(get_results=True)
+            _, _, _, _, _ = cam.do_3a(get_results=True,
+                                      mono_camera=mono_camera)
 
         for fd_mode in fd_modes:
             assert FD_MODE_OFF <= fd_mode <= FD_MODE_FULL
diff --git a/apps/CameraITS/tests/scene3/scene3_0.67_scaled.pdf b/apps/CameraITS/tests/scene3/scene3_0.67_scaled.pdf
new file mode 100644
index 0000000..a3e18e2
--- /dev/null
+++ b/apps/CameraITS/tests/scene3/scene3_0.67_scaled.pdf
Binary files differ
diff --git a/apps/CameraITS/tests/scene3/test_3a_consistency.py b/apps/CameraITS/tests/scene3/test_3a_consistency.py
index f43b3eb..e86da42 100644
--- a/apps/CameraITS/tests/scene3/test_3a_consistency.py
+++ b/apps/CameraITS/tests/scene3/test_3a_consistency.py
@@ -20,8 +20,7 @@
 
 GGAIN_TOL = 0.1
 FD_TOL = 0.1
-SENS_TOL = 0.1
-EXP_TOL = 0.1
+ISO_EXP_TOL = 0.1
 NUM_TEST_ITERATIONS = 3
 
 
@@ -34,29 +33,32 @@
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
         its.caps.skip_unless(its.caps.read_3a(props))
+        mono_camera = its.caps.mono_camera(props)
 
-        exps = []
-        senses = []
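+        # compare the iso*exposure product across runs; AE may trade gain
+        # for exposure time, but the product should stay consistent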
+        iso_exps = []
         g_gains = []
         fds = []
         for _ in range(NUM_TEST_ITERATIONS):
             try:
-                s, e, g, xform, fd = cam.do_3a(get_results=True)
-                print ' sensitivity', s, 'exposure', e
-                print ' gains', g, 'transform', xform
+                s, e, gains, xform, fd = cam.do_3a(get_results=True,
+                                                   mono_camera=mono_camera)
+                print ' iso: %d, exposure: %d, iso*exp: %d' % (s, e, e*s)
+                print ' awb_gains', gains, 'awb_transform', xform
                 print ' fd', fd
                 print ''
-                exps.append(e)
-                senses.append(s)
-                g_gains.append(g[2])
+                iso_exps.append(e*s)
+                g_gains.append(gains[2])
                 fds.append(fd)
             except its.error.Error:
                 print ' FAIL\n'
-        assert len(exps) == NUM_TEST_ITERATIONS
-        assert np.isclose(np.amax(exps), np.amin(exps), EXP_TOL)
-        assert np.isclose(np.amax(senses), np.amin(senses), SENS_TOL)
+        assert len(iso_exps) == NUM_TEST_ITERATIONS
+        assert np.isclose(np.amax(iso_exps), np.amin(iso_exps), ISO_EXP_TOL)
         assert np.isclose(np.amax(g_gains), np.amin(g_gains), GGAIN_TOL)
         assert np.isclose(np.amax(fds), np.amin(fds), FD_TOL)
+        for g in gains:
+            assert not np.isnan(g)
+        for x in xform:
+            assert not np.isnan(x)
 
 if __name__ == '__main__':
     main()
diff --git a/apps/CameraITS/tests/scene3/test_edge_enhancement.py b/apps/CameraITS/tests/scene3/test_edge_enhancement.py
index 37e1d63..76093ef 100644
--- a/apps/CameraITS/tests/scene3/test_edge_enhancement.py
+++ b/apps/CameraITS/tests/scene3/test_edge_enhancement.py
@@ -88,12 +88,13 @@
                              its.caps.per_frame_control(props) and
                              its.caps.edge_mode(props, 0))
 
+        mono_camera = its.caps.mono_camera(props)
         test_fmt = "yuv"
         size = its.objects.get_available_output_sizes(test_fmt, props)[0]
         out_surface = {"width":size[0], "height":size[1], "format":test_fmt}
 
         # Get proper sensitivity, exposure time, and focus distance.
-        s,e,_,_,fd = cam.do_3a(get_results=True)
+        s,e,_,_,fd = cam.do_3a(get_results=True, mono_camera=mono_camera)
 
         # Get the sharpness for each edge mode for regular requests
         sharpness_regular = []
diff --git a/apps/CameraITS/tests/scene3/test_flip_mirror.py b/apps/CameraITS/tests/scene3/test_flip_mirror.py
new file mode 100644
index 0000000..9742e0b
--- /dev/null
+++ b/apps/CameraITS/tests/scene3/test_flip_mirror.py
@@ -0,0 +1,140 @@
+# Copyright 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import cv2
+
+import its.caps
+import its.cv2image
+import its.device
+import its.image
+import its.objects
+import numpy as np
+
+NAME = os.path.basename(__file__).split('.')[0]
+CHART_FILE = os.path.join(os.environ['CAMERA_ITS_TOP'], 'pymodules', 'its',
+                          'test_images', 'ISO12233.png')
+CHART_HEIGHT = 13.5  # cm
+CHART_DISTANCE = 30.0  # cm
+CHART_SCALE_START = 0.65
+CHART_SCALE_STOP = 1.35
+CHART_SCALE_STEP = 0.025
+CHART_ORIENTATIONS = ['nominal', 'flip', 'mirror', 'rotate']
+VGA_WIDTH = 640
+VGA_HEIGHT = 480
+(X_CROP, Y_CROP) = (0.5, 0.5)  # crop center area of ISO12233 chart
+
+
+def test_flip_mirror(cam, props, fmt, chart):
+    """Return if image is flipped or mirrored.
+
+    Args:
+        cam (class): An open device session
+        props (class): Properties of cam
+        fmt (dict): Capture format
+        chart (class): Object with chart properties
+
+    Returns:
+        None. Fails an assertion if the image is flipped or mirrored.
+    """
+
+    # determine if monochrome camera
+    mono_camera = its.caps.mono_camera(props)
+
+    # determine if in debug mode
+    debug = its.caps.debug_mode()
+
+    # get a local copy of the chart template
+    template = cv2.imread(CHART_FILE, cv2.IMREAD_ANYDEPTH)
+
+    # take img, crop chart, scale and prep for cv2 template match
+    s, e, _, _, fd = cam.do_3a(get_results=True, mono_camera=mono_camera)
+    req = its.objects.manual_capture_request(s, e, fd)
+    cap = cam.do_capture(req, fmt)
+    y, _, _ = its.image.convert_capture_to_planes(cap, props)
+    y = its.image.rotate_img_per_argv(y)
+    patch = its.image.get_image_patch(y, chart.xnorm, chart.ynorm,
+                                      chart.wnorm, chart.hnorm)
+    patch = 255 * its.cv2image.gray_scale_img(patch)
+    patch = its.cv2image.scale_img(patch.astype(np.uint8), chart.scale)
+
+    # sanity check on image
+    assert np.max(patch)-np.min(patch) > 255/8
+
+    # save full images if in debug
+    if debug:
+        its.image.write_image(template[:, :, np.newaxis]/255.0,
+                              '%s_template.jpg' % NAME)
+
+    # save patch
+    its.image.write_image(patch[:, :, np.newaxis]/255.0,
+                          '%s_scene_patch.jpg' % NAME)
+
+    # crop center areas and strip off any extra rows/columns
+    template = its.image.get_image_patch(template, (1-X_CROP)/2, (1-Y_CROP)/2,
+                                         X_CROP, Y_CROP)
+    patch = its.image.get_image_patch(patch, (1-X_CROP)/2,
+                                      (1-Y_CROP)/2, X_CROP, Y_CROP)
+    patch = patch[0:min(patch.shape[0], template.shape[0]),
+                  0:min(patch.shape[1], template.shape[1])]
+    comp_chart = patch
+
+    # determine optimum orientation
+    opts = []
+    for orientation in CHART_ORIENTATIONS:
+        if orientation == 'flip':
+            comp_chart = np.flipud(patch)
+        elif orientation == 'mirror':
+            comp_chart = np.fliplr(patch)
+        elif orientation == 'rotate':
+            comp_chart = np.flipud(np.fliplr(patch))
+        correlation = cv2.matchTemplate(comp_chart, template, cv2.TM_CCOEFF)
+        _, opt_val, _, _ = cv2.minMaxLoc(correlation)
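+        # with TM_CCOEFF a larger peak value means a better match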
+        if debug:
+            cv2.imwrite('%s_%s.jpg' % (NAME, orientation), comp_chart)
+        print ' %s correlation value: %d' % (orientation, opt_val)
+        opts.append(opt_val)
+
+    # determine if 'nominal' or 'rotated' is best orientation
+    assert_flag = (opts[0] == max(opts) or opts[3] == max(opts))
+    assert assert_flag, ('Optimum orientation is %s' %
+                         CHART_ORIENTATIONS[np.argmax(opts)])
+    # print warning if rotated
+    if opts[3] == max(opts):
+        print 'Image is rotated 180 degrees. Try "rotate" flag.'
+
+
+def main():
+    """Test if image is properly oriented."""
+
+    print '\nStarting test_flip_mirror.py'
+
+    # check skip conditions
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.read_3a(props))
+    # initialize chart class and locate chart in scene
+    chart = its.cv2image.Chart(CHART_FILE, CHART_HEIGHT, CHART_DISTANCE,
+                               CHART_SCALE_START, CHART_SCALE_STOP,
+                               CHART_SCALE_STEP)
+
+    with its.device.ItsSession() as cam:
+        fmt = {'format': 'yuv', 'width': VGA_WIDTH, 'height': VGA_HEIGHT}
+
+        # test that image is not flipped, mirrored, or rotated
+        test_flip_mirror(cam, props, fmt, chart)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/apps/CameraITS/tests/scene3/test_lens_movement_reporting.py b/apps/CameraITS/tests/scene3/test_lens_movement_reporting.py
index cd563be..6fea633 100644
--- a/apps/CameraITS/tests/scene3/test_lens_movement_reporting.py
+++ b/apps/CameraITS/tests/scene3/test_lens_movement_reporting.py
@@ -37,19 +37,20 @@
 CHART_SCALE_STEP = 0.025
 
 
-def test_lens_movement_reporting(cam, props, fmt, sensitivity, exp, af_fd):
+def test_lens_movement_reporting(cam, props, fmt, gain, exp, af_fd, chart):
     """Return fd, sharpness, lens state of the output images.
 
     Args:
         cam: An open device session.
         props: Properties of cam
         fmt: dict; capture format
-        sensitivity: Sensitivity for the 3A request as defined in
+        gain: Sensitivity for the 3A request as defined in
             android.sensor.sensitivity
         exp: Exposure time for the 3A request as defined in
             android.sensor.exposureTime
         af_fd: Focus distance for the 3A request as defined in
             android.lens.focusDistance
+        chart: Object that contains chart information
 
     Returns:
         Object containing reported sharpness of the output image, keyed by
@@ -57,15 +58,6 @@
             'sharpness'
     """
 
-    # initialize chart class
-    chart = its.cv2image.Chart(CHART_FILE, CHART_HEIGHT, CHART_DISTANCE,
-                               CHART_SCALE_START, CHART_SCALE_STOP,
-                               CHART_SCALE_STEP)
-
-    # find chart location
-    xnorm, ynorm, wnorm, hnorm = chart.locate(cam, props, fmt, sensitivity,
-                                              exp, af_fd)
-
     # initialize variables and take data sets
     data_set = {}
     white_level = int(props['android.sensor.info.whiteLevel'])
@@ -74,7 +66,7 @@
     fds = sorted(fds * NUM_IMGS)
     reqs = []
     for i, fd in enumerate(fds):
-        reqs.append(its.objects.manual_capture_request(sensitivity, exp))
+        reqs.append(its.objects.manual_capture_request(gain, exp))
         reqs[i]['android.lens.focusDistance'] = fd
     caps = cam.do_capture(reqs, fmt)
     for i, cap in enumerate(caps):
@@ -92,12 +84,12 @@
         print ' current lens location (diopters): %.3f' % data['loc']
         print ' lens moving %r' % data['lens_moving']
         y, _, _ = its.image.convert_capture_to_planes(cap, props)
-        y = its.image.flip_mirror_img_per_argv(y)
-        chart = its.image.normalize_img(its.image.get_image_patch(y,
-                                                                  xnorm, ynorm,
-                                                                  wnorm, hnorm))
-        its.image.write_image(chart, '%s_i=%d_chart.jpg' % (NAME, i))
-        data['sharpness'] = white_level*its.image.compute_image_sharpness(chart)
+        y = its.image.rotate_img_per_argv(y)
+        chart.img = its.image.normalize_img(its.image.get_image_patch(
+                y, chart.xnorm, chart.ynorm, chart.wnorm, chart.hnorm))
+        its.image.write_image(chart.img, '%s_i=%d_chart.jpg' % (NAME, i))
+        data['sharpness'] = white_level*its.image.compute_image_sharpness(
+                chart.img)
         print 'Chart sharpness: %.1f\n' % data['sharpness']
         data_set[i] = data
     return data_set
@@ -110,18 +102,27 @@
     """
 
     print '\nStarting test_lens_movement_reporting.py'
+    # check skip conditions
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
         its.caps.skip_unless(not its.caps.fixed_focus(props))
-        its.caps.skip_unless(its.caps.lens_approx_calibrated(props))
+        its.caps.skip_unless(its.caps.read_3a(props) and
+                             its.caps.lens_approx_calibrated(props))
+    # initialize chart class
+    chart = its.cv2image.Chart(CHART_FILE, CHART_HEIGHT, CHART_DISTANCE,
+                               CHART_SCALE_START, CHART_SCALE_STOP,
+                               CHART_SCALE_STEP)
+
+    with its.device.ItsSession() as cam:
+        mono_camera = its.caps.mono_camera(props)
         min_fd = props['android.lens.info.minimumFocusDistance']
         fmt = {'format': 'yuv', 'width': VGA_WIDTH, 'height': VGA_HEIGHT}
 
         # Get proper sensitivity, exposure time, and focus distance with 3A.
-        s, e, _, _, fd = cam.do_3a(get_results=True)
+        s, e, _, _, fd = cam.do_3a(get_results=True, mono_camera=mono_camera)
 
         # Get sharpness for each focal distance
-        d = test_lens_movement_reporting(cam, props, fmt, s, e, fd)
+        d = test_lens_movement_reporting(cam, props, fmt, s, e, fd, chart)
         for k in sorted(d):
             print ('i: %d\tfd: %.3f\tlens location (diopters): %.3f \t'
                    'sharpness: %.1f  \tlens_moving: %r \t'
@@ -162,7 +163,8 @@
         assert np.isclose(min_sharp, max_sharp, rtol=SHARPNESS_TOL)
         # assert reported location is close to assign location for af_fd
         print 'Asserting lens location close to assigned fd for af_fd data'
-        assert np.isclose(d_af_fd[0]['loc'], d_af_fd[0]['fd'],
+        first_key = min(d_af_fd.keys())  # finds 1st non-moving frame
+        assert np.isclose(d_af_fd[first_key]['loc'], d_af_fd[first_key]['fd'],
                           rtol=POSITION_TOL)
 
         # assert reported location is close for min_fd captures
diff --git a/apps/CameraITS/tests/scene3/test_lens_position.py b/apps/CameraITS/tests/scene3/test_lens_position.py
index f850e3d..3978081 100644
--- a/apps/CameraITS/tests/scene3/test_lens_position.py
+++ b/apps/CameraITS/tests/scene3/test_lens_position.py
@@ -23,8 +23,8 @@
 
 NUM_TRYS = 2
 NUM_STEPS = 6
-SHARPNESS_TOL = 10  # percentage
-POSITION_TOL = 10  # percentage
+SHARPNESS_TOL = 0.1
+POSITION_TOL = 0.1
 FRAME_TIME_TOL = 10  # ms
 VGA_WIDTH = 640
 VGA_HEIGHT = 480
@@ -38,7 +38,7 @@
 CHART_SCALE_STEP = 0.025
 
 
-def test_lens_position(cam, props, fmt, sensitivity, exp, af_fd):
+def test_lens_position(cam, props, fmt, sensitivity, exp, chart):
     """Return fd, sharpness, lens state of the output images.
 
     Args:
@@ -49,8 +49,7 @@
             android.sensor.sensitivity
         exp: Exposure time for the 3A request as defined in
             android.sensor.exposureTime
-        af_fd: Focus distance for the 3A request as defined in
-            android.lens.focusDistance
+        chart: Object with chart properties
 
     Returns:
         Dictionary of results for different focal distance captures
@@ -58,15 +57,6 @@
         d_static, d_moving
     """
 
-    # initialize chart class
-    chart = its.cv2image.Chart(CHART_FILE, CHART_HEIGHT, CHART_DISTANCE,
-                                CHART_SCALE_START, CHART_SCALE_STOP,
-                                CHART_SCALE_STEP)
-
-    # find chart location
-    xnorm, ynorm, wnorm, hnorm = chart.locate(cam, props, fmt, sensitivity,
-                                              exp, af_fd)
-
     # initialize variables and take data sets
     data_static = {}
     data_moving = {}
@@ -89,11 +79,11 @@
         print ' focus distance (diopters): %.3f' % data['fd']
         print ' current lens location (diopters): %.3f' % data['loc']
         y, _, _ = its.image.convert_capture_to_planes(cap, props)
-        chart = its.image.normalize_img(its.image.get_image_patch(y,
-                                                                  xnorm, ynorm,
-                                                                  wnorm, hnorm))
-        its.image.write_image(chart, '%s_stat_i=%d_chart.jpg' % (NAME, i))
-        data['sharpness'] = white_level*its.image.compute_image_sharpness(chart)
+        chart.img = its.image.normalize_img(its.image.get_image_patch(
+                y, chart.xnorm, chart.ynorm, chart.wnorm, chart.hnorm))
+        its.image.write_image(chart.img, '%s_stat_i=%d_chart.jpg' % (NAME, i))
+        data['sharpness'] = white_level*its.image.compute_image_sharpness(
+                chart.img)
         print 'Chart sharpness: %.1f\n' % data['sharpness']
         data_static[i] = data
     # take moving data set
@@ -115,12 +105,12 @@
         print ' focus distance (diopters): %.3f' % data['fd']
         print ' current lens location (diopters): %.3f' % data['loc']
         y, _, _ = its.image.convert_capture_to_planes(cap, props)
-        y = its.image.flip_mirror_img_per_argv(y)
-        chart = its.image.normalize_img(its.image.get_image_patch(y,
-                                                                  xnorm, ynorm,
-                                                                  wnorm, hnorm))
-        its.image.write_image(chart, '%s_move_i=%d_chart.jpg' % (NAME, i))
-        data['sharpness'] = white_level*its.image.compute_image_sharpness(chart)
+        y = its.image.rotate_img_per_argv(y)
+        chart.img = its.image.normalize_img(its.image.get_image_patch(
+                y, chart.xnorm, chart.ynorm, chart.wnorm, chart.hnorm))
+        its.image.write_image(chart.img, '%s_move_i=%d_chart.jpg' % (NAME, i))
+        data['sharpness'] = white_level*its.image.compute_image_sharpness(
+                chart.img)
         print 'Chart sharpness: %.1f\n' % data['sharpness']
         data_moving[i] = data
     return data_static, data_moving
@@ -128,19 +118,27 @@
 
 def main():
     """Test if focus position is properly reported for moving lenses."""
-
     print '\nStarting test_lens_position.py'
+    # check skip conditions
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
         its.caps.skip_unless(not its.caps.fixed_focus(props))
-        its.caps.skip_unless(its.caps.lens_calibrated(props))
+        its.caps.skip_unless(its.caps.read_3a(props) and
+                             its.caps.lens_calibrated(props))
+    # initialize chart class
+    chart = its.cv2image.Chart(CHART_FILE, CHART_HEIGHT, CHART_DISTANCE,
+                               CHART_SCALE_START, CHART_SCALE_STOP,
+                               CHART_SCALE_STEP)
+
+    with its.device.ItsSession() as cam:
+        mono_camera = its.caps.mono_camera(props)
         fmt = {'format': 'yuv', 'width': VGA_WIDTH, 'height': VGA_HEIGHT}
 
-        # Get proper sensitivity, exposure time, and focus distance with 3A.
-        s, e, _, _, fd = cam.do_3a(get_results=True)
+        # Get proper sensitivity and exposure time with 3A
+        s, e, _, _, _ = cam.do_3a(get_results=True, mono_camera=mono_camera)
 
         # Get sharpness for each focal distance
-        d_stat, d_move = test_lens_position(cam, props, fmt, s, e, fd)
+        d_stat, d_move = test_lens_position(cam, props, fmt, s, e, chart)
         print 'Lens stationary'
         for k in sorted(d_stat):
             print ('i: %d\tfd: %.3f\tlens location (diopters): %.3f \t'
@@ -161,14 +159,19 @@
         print 'Asserting static lens locations/sharpness are similar'
         for i in range(len(d_stat)/2):
             j = 2 * NUM_STEPS - 1 - i
-            print (' lens position: %.3f'
-                   % d_stat[i]['fd'])
+            rw_msg = 'fd_write: %.3f, fd_read: %.3f, RTOL: %.2f' % (
+                    d_stat[i]['fd'], d_stat[i]['loc'], POSITION_TOL)
+            fr_msg = 'loc_fwd: %.3f, loc_rev: %.3f, RTOL: %.2f' % (
+                    d_stat[i]['loc'], d_stat[j]['loc'], POSITION_TOL)
+            s_msg = 'sharpness_fwd: %.3f, sharpness_rev: %.3f, RTOL: %.2f' % (
+                    d_stat[i]['sharpness'], d_stat[j]['sharpness'],
+                    SHARPNESS_TOL)
             assert np.isclose(d_stat[i]['loc'], d_stat[i]['fd'],
-                              rtol=POSITION_TOL/100.0)
+                              rtol=POSITION_TOL), rw_msg
             assert np.isclose(d_stat[i]['loc'], d_stat[j]['loc'],
-                              rtol=POSITION_TOL/100.0)
+                              rtol=POSITION_TOL), fr_msg
             assert np.isclose(d_stat[i]['sharpness'], d_stat[j]['sharpness'],
-                              rtol=SHARPNESS_TOL/100.0)
+                              rtol=SHARPNESS_TOL), s_msg
         # assert moving frames approximately consecutive with even distribution
         print 'Asserting moving frames are consecutive'
         times = [v['timestamp'] for v in d_move.itervalues()]
@@ -177,9 +180,10 @@
         # assert reported location/sharpness is correct in moving frames
         print 'Asserting moving lens locations/sharpness are similar'
         for i in range(len(d_move)):
-            print ' lens position: %.3f' % d_stat[i]['fd']
+            m_msg = 'static: %.3f, moving: %.3f, RTOL: %.2f' % (
+                    d_stat[i]['loc'], d_move[i]['loc'], POSITION_TOL)
             assert np.isclose(d_stat[i]['loc'], d_move[i]['loc'],
-                              rtol=POSITION_TOL)
+                              rtol=POSITION_TOL), m_msg
             if d_move[i]['lens_moving'] and i > 0:
                 if d_stat[i]['sharpness'] > d_stat[i-1]['sharpness']:
                     assert (d_stat[i]['sharpness']*(1.0+SHARPNESS_TOL) >
@@ -190,8 +194,9 @@
                             d_move[i]['sharpness'] >
                             d_stat[i]['sharpness']*(1.0-SHARPNESS_TOL))
             elif not d_move[i]['lens_moving']:
-                assert np.isclose(d_stat[i]['sharpness'],
-                                  d_move[i]['sharpness'], rtol=SHARPNESS_TOL)
+                assert np.isclose(
+                        d_stat[i]['sharpness'], d_move[i]['sharpness'],
+                        rtol=SHARPNESS_TOL)
             else:
                 raise its.error.Error('Lens is moving at frame 0!')
 
diff --git a/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py b/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py
index 95fc636..049426a 100644
--- a/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py
+++ b/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py
@@ -101,6 +101,7 @@
                              (its.caps.yuv_reprocess(props) or
                               its.caps.private_reprocess(props)))
 
+        mono_camera = its.caps.mono_camera(props)
         # If reprocessing is supported, ZSL EE mode must be available.
         assert(its.caps.edge_mode(props, 3))
 
@@ -114,7 +115,7 @@
         out_surface = {"width":size[0], "height":size[1], "format":"jpg"}
 
         # Get proper sensitivity, exposure time, and focus distance.
-        s,e,_,_,fd = cam.do_3a(get_results=True)
+        s,e,_,_,fd = cam.do_3a(get_results=True, mono_camera=mono_camera)
 
         # Get the sharpness for each edge mode for regular requests
         sharpness_regular = []
diff --git a/apps/CameraITS/tests/scene4/scene4_0.67_scaled.pdf b/apps/CameraITS/tests/scene4/scene4_0.67_scaled.pdf
new file mode 100644
index 0000000..7fb1e42
--- /dev/null
+++ b/apps/CameraITS/tests/scene4/scene4_0.67_scaled.pdf
Binary files differ
diff --git a/apps/CameraITS/tests/scene4/test_aspect_ratio_and_crop.py b/apps/CameraITS/tests/scene4/test_aspect_ratio_and_crop.py
index 06db8bb..564e3e7 100644
--- a/apps/CameraITS/tests/scene4/test_aspect_ratio_and_crop.py
+++ b/apps/CameraITS/tests/scene4/test_aspect_ratio_and_crop.py
@@ -12,18 +12,172 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import its.image
-import its.caps
-import its.device
-import its.objects
+import math
 import os.path
 import cv2
+import its.caps
+import its.device
+import its.image
+import its.objects
 import numpy as np
 
+FMT_ATOL = 0.01  # Absolute tolerance on format ratio
+AR_CHECKED = ["4:3", "16:9", "18:9"]  # Aspect ratios checked
+FOV_PERCENT_RTOL = 0.15  # Relative tolerance on circle FoV % to expected
+LARGE_SIZE = 2000   # Define the size of a large image
+NAME = os.path.basename(__file__).split(".")[0]
+NUM_DISTORT_PARAMS = 5
+THRESH_L_AR = 0.02  # aspect ratio test threshold of large images
+THRESH_XS_AR = 0.075  # aspect ratio test threshold of mini images
+THRESH_L_CP = 0.02  # Crop test threshold of large images
+THRESH_XS_CP = 0.075  # Crop test threshold of mini images
+THRESH_MIN_PIXEL = 4  # Crop test allowed offset
+PREVIEW_SIZE = (1920, 1080)  # preview size
+
+
+def convert_ar_to_float(ar_string):
+    """Convert aspect ratio string into float.
+
+    Args:
+        ar_string:  "4:3" or "16:9"
+    Returns:
+        float(ar_string)
+    """
+    ar_list = [float(x) for x in ar_string.split(":")]
+    return ar_list[0] / ar_list[1]
+
+
+def determine_sensor_aspect_ratio(props):
+    """Determine the aspect ratio of the sensor.
+
+    Args:
+        props:      camera properties
+    Returns:
+        matched entry in AR_CHECKED
+    """
+    match_ar = None
+    sensor_size = props["android.sensor.info.preCorrectionActiveArraySize"]
+    sensor_ar = (float(abs(sensor_size["right"] - sensor_size["left"])) /
+                 abs(sensor_size["bottom"] - sensor_size["top"]))
+    for ar_string in AR_CHECKED:
+        if np.isclose(sensor_ar, convert_ar_to_float(ar_string), atol=FMT_ATOL):
+            match_ar = ar_string
+    if not match_ar:
+        print "Warning! RAW aspect ratio not in:", AR_CHECKED
+    return match_ar
+
+
+def aspect_ratio_scale_factors(ref_ar_string, props):
+    """Determine scale factors for each aspect ratio to correct cropping.
+
+    Args:
+        ref_ar_string:      camera aspect ratio that is the reference
+        props:              camera properties
+    Returns:
+        dict of correction ratios with AR_CHECKED values as keys
+    """
+    ref_ar = convert_ar_to_float(ref_ar_string)
+
+    # find sensor area
+    height_max = 0
+    width_max = 0
+    for ar_string in AR_CHECKED:
+        match_ar = [float(x) for x in ar_string.split(":")]
+        try:
+            f = its.objects.get_largest_jpeg_format(props, match_ar=match_ar)
+            if f["height"] > height_max:
+                height_max = f["height"]
+            if f["width"] > width_max:
+                width_max = f["width"]
+        except IndexError:
+            continue
+    sensor_ar = float(width_max) / height_max
+
+    # apply scaling
+    ar_scaling = {}
+    for ar_string in AR_CHECKED:
+        target_ar = convert_ar_to_float(ar_string)
+        # scale down to sensor with greater (or equal) dims
+        if ref_ar >= sensor_ar:
+            scaling = sensor_ar / ref_ar
+        else:
+            scaling = ref_ar / sensor_ar
+
+        # scale up due to cropping to other format
+        if target_ar >= sensor_ar:
+            scaling = scaling * target_ar / sensor_ar
+        else:
+            scaling = scaling * sensor_ar / target_ar
+
+        ar_scaling[ar_string] = scaling
+    return ar_scaling
+
+
+def find_jpeg_fov_reference(cam, req, props):
+    """Determine the circle coverage of the image in JPEG reference image.
+
+    Args:
+        cam:        camera object
+        req:        camera request
+        props:      camera properties
+
+    Returns:
+        ref_fov:    dict with [fmt, % coverage, w, h]
+    """
+    ref_fov = {}
+    fmt_dict = {}
+
+    # find number of pixels in different formats
+    for ar in AR_CHECKED:
+        match_ar = [float(x) for x in ar.split(":")]
+        try:
+            f = its.objects.get_largest_jpeg_format(props, match_ar=match_ar)
+            fmt_dict[f["height"]*f["width"]] = {"fmt": f, "ar": ar}
+        except IndexError:
+            continue
+
+    # use image with largest coverage as reference
+    ar_max_pixels = max(fmt_dict, key=int)
+
+    # capture and determine circle area in image
+    cap = cam.do_capture(req, fmt_dict[ar_max_pixels]["fmt"])
+    w = cap["width"]
+    h = cap["height"]
+    fmt = cap["format"]
+
+    img = its.image.convert_capture_to_rgb_image(cap, props=props)
+    print "Captured %s %dx%d" % (fmt, w, h)
+    img_name = "%s_%s_w%d_h%d.png" % (NAME, fmt, w, h)
+    _, _, circle_size = measure_aspect_ratio(img, False, img_name, True)
+    fov_percent = calc_circle_image_ratio(circle_size[1], circle_size[0], w, h)
+    ref_fov["fmt"] = fmt_dict[ar_max_pixels]["ar"]
+    ref_fov["percent"] = fov_percent
+    ref_fov["w"] = w
+    ref_fov["h"] = h
+    print "Using JPEG reference:", ref_fov
+    return ref_fov
+
+
+def calc_circle_image_ratio(circle_w, circle_h, image_w, image_h):
+    """Calculate the circle coverage of the image.
+
+    Args:
+        circle_w (int):      width of circle
+        circle_h (int):      height of circle
+        image_w (int):       width of image
+        image_h (int):       height of image
+    Returns:
+        fov_percent (float): % of image covered by circle
+    """
+    circle_area = math.pi * math.pow(np.mean([circle_w, circle_h])/2.0, 2)
+    image_area = image_w * image_h
+    fov_percent = 100*circle_area/image_area
+    return fov_percent
+
 
 def main():
-    """ Test aspect ratio and check if images are cropped correctly under each
-    output size
+    """Test aspect ratio & check if images are cropped correctly for each fmt.
+
     Aspect ratio test runs on level3, full and limited devices. Crop test only
     runs on full and level3 devices.
     The test image is a black circle inside a black square. When raw capture is
@@ -33,25 +187,12 @@
     If raw capture is unavailable, take a picture of the test image right in
     front to eliminate the shooting angle effect. The height vs. width ratio for
     the circle should be close to 1. Considering shooting position error, aspect
-    ratio greater than 1.05 or smaller than 0.95 will fail the test.
+    ratio greater than 1+THRESH_*_AR or less than 1-THRESH_*_AR will FAIL.
     """
-    NAME = os.path.basename(__file__).split(".")[0]
-    LARGE_SIZE = 2000   # Define the size of a large image
-    # pass/fail threshold of large size images for aspect ratio test
-    THRES_L_AR_TEST = 0.02
-    # pass/fail threshold of mini size images for aspect ratio test
-    THRES_XS_AR_TEST = 0.05
-    # pass/fail threshold of large size images for crop test
-    THRES_L_CP_TEST = 0.02
-    # pass/fail threshold of mini size images for crop test
-    THRES_XS_CP_TEST = 0.05
-    # Crop test will allow at least THRES_MIN_PIXEL offset
-    THRES_MIN_PIXEL = 4
-    PREVIEW_SIZE = (1920, 1080) # preview size
     aspect_ratio_gt = 1  # ground truth
     failed_ar = []  # streams failed the aspect ratio test
-    failed_crop = [] # streams failed the crop test
-    format_list = [] # format list for multiple capture objects.
+    failed_crop = []  # streams failed the crop test
+    format_list = []  # format list for multiple capture objects.
     # Do multi-capture of "iter" and "cmpr". Iterate through all the
     # available sizes of "iter", and only use the size specified for "cmpr"
     # Do single-capture to cover untouched sizes in multi-capture when needed.
@@ -65,24 +206,24 @@
                         "cmpr": "raw", "cmpr_size": None})
     format_list.append({"iter": "jpeg", "iter_max": None,
                         "cmpr": "yuv", "cmpr_size": PREVIEW_SIZE})
+    ref_fov = {}
     with its.device.ItsSession() as cam:
         props = cam.get_camera_properties()
-        # Todo: test for radial distortion enabled devices has not yet been
-        # implemented
-        its.caps.skip_unless(not its.caps.radial_distortion_correction(props))
         its.caps.skip_unless(its.caps.read_3a(props))
         full_device = its.caps.full_or_better(props)
         limited_device = its.caps.limited(props)
         its.caps.skip_unless(full_device or limited_device)
         level3_device = its.caps.level3(props)
         raw_avlb = its.caps.raw16(props)
+        mono_camera = its.caps.mono_camera(props)
         run_crop_test = (level3_device or full_device) and raw_avlb
         if not run_crop_test:
             print "Crop test skipped"
         debug = its.caps.debug_mode()
         # Converge 3A and get the estimates.
         sens, exp, gains, xform, focus = cam.do_3a(get_results=True,
-                                                   lock_ae=True, lock_awb=True)
+                                                   lock_ae=True, lock_awb=True,
+                                                   mono_camera=mono_camera)
         print "AE sensitivity %d, exposure %dms" % (sens, exp / 1000000.0)
         print "AWB gains", gains
         print "AWB transform", xform
@@ -96,24 +237,86 @@
         # If raw capture is available, use it as ground truth.
         if raw_avlb:
             # Capture full-frame raw. Use its aspect ratio and circle center
-            # location as ground truth for the other jepg or yuv images.
+            # location as ground truth for the other jpeg or yuv images.
+            print "Creating references for fov_coverage from RAW"
             out_surface = {"format": "raw"}
             cap_raw = cam.do_capture(req, out_surface)
             print "Captured %s %dx%d" % ("raw", cap_raw["width"],
                                          cap_raw["height"])
             img_raw = its.image.convert_capture_to_rgb_image(cap_raw,
                                                              props=props)
+            if its.caps.distortion_correction(props):
+                # The intrinsics and distortion coefficients are meant for full
+                # size RAW. Resize back to full size here.
+                img_raw = cv2.resize(img_raw, (0,0), fx=2.0, fy=2.0)
+                # Intrinsic cal is of format: [f_x, f_y, c_x, c_y, s]
+                # [f_x, f_y] is the horizontal and vertical focal lengths,
+                # [c_x, c_y] is the position of the optical axis,
+                # and s is skew of sensor plane vs lens plane.
+                print "Applying intrinsic calibration and distortion params"
+                ical = np.array(props["android.lens.intrinsicCalibration"])
+                msg = "Cannot include lens distortion without intrinsic cal!"
+                assert len(ical) == 5, msg
+                sensor_h = props["android.sensor.info.physicalSize"]["height"]
+                sensor_w = props["android.sensor.info.physicalSize"]["width"]
+                pixel_h = props["android.sensor.info.pixelArraySize"]["height"]
+                pixel_w = props["android.sensor.info.pixelArraySize"]["width"]
+                fd = float(props["android.lens.info.availableFocalLengths"][0])
+                fd_w_pix = pixel_w * fd / sensor_w
+                fd_h_pix = pixel_h * fd / sensor_h
+                # transformation matrix
+                # k = [[f_x, s, c_x],
+                #      [0, f_y, c_y],
+                #      [0,   0,   1]]
+                k = np.array([[ical[0], ical[4], ical[2]],
+                              [0, ical[1], ical[3]],
+                              [0, 0, 1]])
+                print "k:", k
+                e_msg = "fd_w(pixels): %.2f\tcal[0](pixels): %.2f\tTOL=20%%" % (
+                        fd_w_pix, ical[0])
+                assert np.isclose(fd_w_pix, ical[0], rtol=0.20), e_msg
+                e_msg = "fd_h(pixels): %.2f\tcal[1](pixels): %.2f\tTOL=20%%" % (
+                        fd_h_pix, ical[1])
+                assert np.isclose(fd_h_pix, ical[1], rtol=0.20), e_msg
+
+                # distortion
+                rad_dist = props["android.lens.distortion"]
+                print "android.lens.distortion:", rad_dist
+                e_msg = "%s param(s) found. %d expected." % (len(rad_dist),
+                                                             NUM_DISTORT_PARAMS)
+                assert len(rad_dist) == NUM_DISTORT_PARAMS, e_msg
+                opencv_dist = np.array([rad_dist[0], rad_dist[1],
+                                        rad_dist[3], rad_dist[4],
+                                        rad_dist[2]])
+                print "dist:", opencv_dist
+                img_raw = cv2.undistort(img_raw, k, opencv_dist)
             size_raw = img_raw.shape
-            img_name = "%s_%s_w%d_h%d.png" \
-                       % (NAME, "raw", size_raw[1], size_raw[0])
+            w_raw = size_raw[1]
+            h_raw = size_raw[0]
+            img_name = "%s_%s_w%d_h%d.png" % (NAME, "raw", w_raw, h_raw)
             aspect_ratio_gt, cc_ct_gt, circle_size_raw = measure_aspect_ratio(
-                                                         img_raw, 1, img_name,
-                                                         debug)
+                    img_raw, raw_avlb, img_name, debug)
+            raw_fov_percent = calc_circle_image_ratio(
+                    circle_size_raw[1], circle_size_raw[0], w_raw, h_raw)
             # Normalize the circle size to 1/4 of the image size, so that
-            # circle size won"t affect the crop test result
+            # circle size won't affect the crop test result
             factor_cp_thres = (min(size_raw[0:1])/4.0) / max(circle_size_raw)
-            thres_l_cp_test = THRES_L_CP_TEST * factor_cp_thres
-            thres_xs_cp_test = THRES_XS_CP_TEST * factor_cp_thres
+            thres_l_cp_test = THRESH_L_CP * factor_cp_thres
+            thres_xs_cp_test = THRESH_XS_CP * factor_cp_thres
+            # If RAW in AR_CHECKED, use it as reference
+            ref_fov["fmt"] = determine_sensor_aspect_ratio(props)
+            if ref_fov["fmt"]:
+                ref_fov["percent"] = raw_fov_percent
+                ref_fov["w"] = w_raw
+                ref_fov["h"] = h_raw
+                print "Using RAW reference:", ref_fov
+            else:
+                ref_fov = find_jpeg_fov_reference(cam, req, props)
+        else:
+            ref_fov = find_jpeg_fov_reference(cam, req, props)
+
+        # Determine scaling factors for AR calculations
+        ar_scaling = aspect_ratio_scale_factors(ref_fov["fmt"], props)
 
         # Take pictures of each setting with all the image sizes available.
         for fmt in format_list:
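
The coefficient shuffle above, which feeds android.lens.distortion into cv2.undistort(), is easy to misread, so here is a minimal standalone sketch of just that step (not part of the patch). It assumes, as the index order [0, 1, 3, 4, 2] in the hunk implies, that the metadata lists the three radial terms before the two tangential ones, while OpenCV expects (k1, k2, p1, p2, k3); the sample values are made up.

import cv2
import numpy as np

# Hypothetical metadata: android.lens.distortion = [k1, k2, k3, p1, p2]
rad_dist = [0.1, -0.03, 0.002, 0.0005, -0.0004]
# OpenCV's undistort() expects distCoeffs ordered as (k1, k2, p1, p2, k3)
opencv_dist = np.array([rad_dist[0], rad_dist[1],
                        rad_dist[3], rad_dist[4],
                        rad_dist[2]])

# Intrinsic matrix built the same way as in the patch (made-up values):
# k = [[f_x, s, c_x], [0, f_y, c_y], [0, 0, 1]]
k = np.array([[1000.0, 0.0, 320.0],
              [0.0, 1000.0, 240.0],
              [0.0, 0.0, 1.0]])

img = np.zeros((480, 640, 3), dtype=np.uint8)  # stand-in for the RAW image
undistorted = cv2.undistort(img, k, opencv_dist)
print(undistorted.shape)  # (480, 640, 3)
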
@@ -124,7 +327,7 @@
             if dual_target:
                 sizes = its.objects.get_available_output_sizes(
                         fmt_cmpr, props, fmt["cmpr_size"])
-                if len(sizes) == 0: # device might not support RAW
+                if not sizes:  # device might not support RAW
                     continue
                 size_cmpr = sizes[0]
             for size_iter in its.objects.get_available_output_sizes(
@@ -133,10 +336,9 @@
                 h_iter = size_iter[1]
                 # Skip testing same format/size combination
                 # ITS does not handle that properly now
-                if dual_target and \
-                        w_iter == size_cmpr[0] and \
-                        h_iter == size_cmpr[1] and \
-                        fmt_iter == fmt_cmpr:
+                if (dual_target
+                            and w_iter*h_iter == size_cmpr[0]*size_cmpr[1]
+                            and fmt_iter == fmt_cmpr):
                     continue
                 out_surface = [{"width": w_iter,
                                 "height": h_iter,
@@ -150,34 +352,64 @@
                     frm_iter = cap[0]
                 else:
                     frm_iter = cap
-                assert (frm_iter["format"] == fmt_iter)
-                assert (frm_iter["width"] == w_iter)
-                assert (frm_iter["height"] == h_iter)
-                print "Captured %s with %s %dx%d" \
-                        % (fmt_iter, fmt_cmpr, w_iter, h_iter)
+                assert frm_iter["format"] == fmt_iter
+                assert frm_iter["width"] == w_iter
+                assert frm_iter["height"] == h_iter
+                print "Captured %s with %s %dx%d. Compared size: %dx%d" % (
+                        fmt_iter, fmt_cmpr, w_iter, h_iter, size_cmpr[0],
+                        size_cmpr[1])
                 img = its.image.convert_capture_to_rgb_image(frm_iter)
-                img_name = "%s_%s_with_%s_w%d_h%d.png" \
-                           % (NAME, fmt_iter, fmt_cmpr, w_iter, h_iter)
-                aspect_ratio, cc_ct, (cc_w, cc_h) = \
-                        measure_aspect_ratio(img, raw_avlb, img_name,
-                                             debug)
+                if its.caps.distortion_correction(props) and raw_avlb:
+                    w_scale = float(w_iter)/w_raw
+                    h_scale = float(h_iter)/h_raw
+                    k_scale = np.array([[ical[0]*w_scale, ical[4],
+                                         ical[2]*w_scale],
+                                        [0, ical[1]*h_scale, ical[3]*h_scale],
+                                        [0, 0, 1]])
+                    print "k_scale:", k_scale
+                    img = cv2.undistort(img, k_scale, opencv_dist)
+                img_name = "%s_%s_with_%s_w%d_h%d.png" % (NAME,
+                                                          fmt_iter, fmt_cmpr,
+                                                          w_iter, h_iter)
+                aspect_ratio, cc_ct, (cc_w, cc_h) = measure_aspect_ratio(
+                        img, raw_avlb, img_name, debug)
+                # check fov coverage for all fmts in AR_CHECKED
+                fov_percent = calc_circle_image_ratio(
+                        cc_w, cc_h, w_iter, h_iter)
+                for ar_check in AR_CHECKED:
+                    match_ar_list = [float(x) for x in ar_check.split(":")]
+                    match_ar = match_ar_list[0] / match_ar_list[1]
+                    if np.isclose(float(w_iter)/h_iter, match_ar,
+                                  atol=FMT_ATOL):
+                        # scale check value based on aspect ratio
+                        chk_percent = ref_fov["percent"] * ar_scaling[ar_check]
+
+                        msg = "FoV %%: %.2f, Ref FoV %%: %.2f, TOL=%.f%%, " % (
+                                fov_percent, chk_percent,
+                                FOV_PERCENT_RTOL*100)
+                        msg += "img: %dx%d, ref: %dx%d" % (w_iter, h_iter,
+                                                           ref_fov["w"],
+                                                           ref_fov["h"])
+                        assert np.isclose(fov_percent, chk_percent,
+                                          rtol=FOV_PERCENT_RTOL), msg
                 # check pass/fail for aspect ratio
-                # image size >= LARGE_SIZE: use THRES_L_AR_TEST
-                # image size == 0 (extreme case): THRES_XS_AR_TEST
-                # 0 < image size < LARGE_SIZE: scale between THRES_XS_AR_TEST
-                # and THRES_L_AR_TEST
-                thres_ar_test = max(THRES_L_AR_TEST,
-                        THRES_XS_AR_TEST + max(w_iter, h_iter) *
-                        (THRES_L_AR_TEST-THRES_XS_AR_TEST)/LARGE_SIZE)
+                # image size >= LARGE_SIZE: use THRESH_L_AR
+                # image size == 0 (extreme case): THRESH_XS_AR
+                # 0 < image size < LARGE_SIZE: scale between THRESH_XS_AR
+                # and THRESH_L_AR
+                thres_ar_test = max(
+                        THRESH_L_AR, THRESH_XS_AR + max(w_iter, h_iter) *
+                        (THRESH_L_AR-THRESH_XS_AR)/LARGE_SIZE)
                 thres_range_ar = (aspect_ratio_gt-thres_ar_test,
                                   aspect_ratio_gt+thres_ar_test)
-                if aspect_ratio < thres_range_ar[0] \
-                        or aspect_ratio > thres_range_ar[1]:
+                if (aspect_ratio < thres_range_ar[0] or
+                            aspect_ratio > thres_range_ar[1]):
                     failed_ar.append({"fmt_iter": fmt_iter,
                                       "fmt_cmpr": fmt_cmpr,
                                       "w": w_iter, "h": h_iter,
                                       "ar": aspect_ratio,
                                       "valid_range": thres_range_ar})
+                    its.image.write_image(img/255, img_name, True)
 
                 # check pass/fail for crop
                 if run_crop_test:
@@ -185,29 +417,29 @@
                     # image size == 0 (extreme case): thres_xs_cp_test
                     # 0 < image size < LARGE_SIZE: scale between
                     # thres_xs_cp_test and thres_l_cp_test
-                    # Also, allow at least THRES_MIN_PIXEL off to
+                    # Also, allow at least THRESH_MIN_PIXEL off to
                     # prevent threshold being too tight for very
                     # small circle
-                    thres_hori_cp_test = max(thres_l_cp_test,
-                            thres_xs_cp_test + w_iter *
+                    thres_hori_cp_test = max(
+                            thres_l_cp_test, thres_xs_cp_test + w_iter *
                             (thres_l_cp_test-thres_xs_cp_test)/LARGE_SIZE)
-                    min_threshold_h = THRES_MIN_PIXEL / cc_w
+                    min_threshold_h = THRESH_MIN_PIXEL / cc_w
                     thres_hori_cp_test = max(thres_hori_cp_test,
-                            min_threshold_h)
+                                             min_threshold_h)
                     thres_range_h_cp = (cc_ct_gt["hori"]-thres_hori_cp_test,
                                         cc_ct_gt["hori"]+thres_hori_cp_test)
-                    thres_vert_cp_test = max(thres_l_cp_test,
-                            thres_xs_cp_test + h_iter *
+                    thres_vert_cp_test = max(
+                            thres_l_cp_test, thres_xs_cp_test + h_iter *
                             (thres_l_cp_test-thres_xs_cp_test)/LARGE_SIZE)
-                    min_threshold_v = THRES_MIN_PIXEL / cc_h
+                    min_threshold_v = THRESH_MIN_PIXEL / cc_h
                     thres_vert_cp_test = max(thres_vert_cp_test,
-                            min_threshold_v)
+                                             min_threshold_v)
                     thres_range_v_cp = (cc_ct_gt["vert"]-thres_vert_cp_test,
                                         cc_ct_gt["vert"]+thres_vert_cp_test)
-                    if cc_ct["hori"] < thres_range_h_cp[0] \
-                            or cc_ct["hori"] > thres_range_h_cp[1] \
-                            or cc_ct["vert"] < thres_range_v_cp[0] \
-                            or cc_ct["vert"] > thres_range_v_cp[1]:
+                    if (cc_ct["hori"] < thres_range_h_cp[0]
+                                or cc_ct["hori"] > thres_range_h_cp[1]
+                                or cc_ct["vert"] < thres_range_v_cp[0]
+                                or cc_ct["vert"] > thres_range_v_cp[1]):
                         failed_crop.append({"fmt_iter": fmt_iter,
                                             "fmt_cmpr": fmt_cmpr,
                                             "w": w_iter, "h": h_iter,
@@ -215,6 +447,7 @@
                                             "ct_vert": cc_ct["vert"],
                                             "valid_range_h": thres_range_h_cp,
                                             "valid_range_v": thres_range_v_cp})
+                        its.image.write_image(img/255, img_name, True)
 
         # Print aspect ratio test results
         failed_image_number_for_aspect_ratio_test = len(failed_ar)
@@ -223,33 +456,34 @@
             print "Images failed in the aspect ratio test:"
             print "Aspect ratio value: width / height"
         for fa in failed_ar:
-            print "%s with %s %dx%d: %.3f; valid range: %.3f ~ %.3f" % \
-                  (fa["fmt_iter"], fa["fmt_cmpr"], fa["w"], fa["h"], fa["ar"],
-                   fa["valid_range"][0], fa["valid_range"][1])
+            print "%s with %s %dx%d: %.3f;" % (fa["fmt_iter"], fa["fmt_cmpr"],
+                                               fa["w"], fa["h"], fa["ar"]),
+            print "valid range: %.3f ~ %.3f" % (fa["valid_range"][0],
+                                                fa["valid_range"][1])
 
         # Print crop test results
         failed_image_number_for_crop_test = len(failed_crop)
         if failed_image_number_for_crop_test > 0:
             print "\nCrop test summary"
             print "Images failed in the crop test:"
-            print "Circle center position, (horizontal x vertical), listed " \
-                  "below is relative to the image center."
+            print "Circle center position, (horizontal x vertical), listed",
+            print "below is relative to the image center."
         for fc in failed_crop:
-            print "%s with %s %dx%d: %.3f x %.3f; " \
-                    "valid horizontal range: %.3f ~ %.3f; " \
-                    "valid vertical range: %.3f ~ %.3f" \
-                    % (fc["fmt_iter"], fc["fmt_cmpr"], fc["w"], fc["h"],
-                    fc["ct_hori"], fc["ct_vert"], fc["valid_range_h"][0],
-                    fc["valid_range_h"][1], fc["valid_range_v"][0],
-                    fc["valid_range_v"][1])
+            print "%s with %s %dx%d: %.3f x %.3f;" % (
+                    fc["fmt_iter"], fc["fmt_cmpr"], fc["w"], fc["h"],
+                    fc["ct_hori"], fc["ct_vert"]),
+            print "valid horizontal range: %.3f ~ %.3f;" % (
+                    fc["valid_range_h"][0], fc["valid_range_h"][1]),
+            print "valid vertical range: %.3f ~ %.3f" % (
+                    fc["valid_range_v"][0], fc["valid_range_v"][1])
 
-        assert (failed_image_number_for_aspect_ratio_test == 0)
+        assert failed_image_number_for_aspect_ratio_test == 0
         if level3_device:
-            assert (failed_image_number_for_crop_test == 0)
+            assert failed_image_number_for_crop_test == 0
 
 
 def measure_aspect_ratio(img, raw_avlb, img_name, debug):
-    """ Measure the aspect ratio of the black circle in the test image.
+    """Measure the aspect ratio of the black circle in the test image.
 
     Args:
         img: Numpy float image array in RGB, with pixel values in [0,1].
@@ -263,22 +497,22 @@
         (circle_w, circle_h): tuple of the circle size
     """
     size = img.shape
-    img = img * 255
+    img *= 255
     # Gray image
-    img_gray = 0.299 * img[:,:,2] + 0.587 * img[:,:,1] + 0.114 * img[:,:,0]
+    img_gray = 0.299*img[:, :, 2] + 0.587*img[:, :, 1] + 0.114*img[:, :, 0]
 
     # otsu threshold to binarize the image
-    ret3, img_bw = cv2.threshold(np.uint8(img_gray), 0, 255,
-            cv2.THRESH_BINARY + cv2.THRESH_OTSU)
+    _, img_bw = cv2.threshold(np.uint8(img_gray), 0, 255,
+                              cv2.THRESH_BINARY + cv2.THRESH_OTSU)
 
     # connected component
     cv2_version = cv2.__version__
-    if cv2_version.startswith('2.4.'):
+    if cv2_version.startswith("2.4."):
         contours, hierarchy = cv2.findContours(255-img_bw, cv2.RETR_TREE,
-                cv2.CHAIN_APPROX_SIMPLE)
-    elif cv2_version.startswith('3.2.'):
+                                               cv2.CHAIN_APPROX_SIMPLE)
+    elif cv2_version.startswith("3.2."):
         _, contours, hierarchy = cv2.findContours(255-img_bw, cv2.RETR_TREE,
-                cv2.CHAIN_APPROX_SIMPLE)
+                                                  cv2.CHAIN_APPROX_SIMPLE)
 
     # Check each component and find the black circle
     min_cmpt = size[0] * size[1] * 0.005
@@ -291,8 +525,8 @@
         # Parental component should exist and the area is acceptable.
         # The contour of a circle should have at least 5 points
         child_area = cv2.contourArea(ct)
-        if hrch[3] == -1 or child_area < min_cmpt or child_area > max_cmpt or \
-                len(ct) < 15:
+        if (hrch[3] == -1 or child_area < min_cmpt or child_area > max_cmpt
+                    or len(ct) < 15):
             continue
         # Check the shapes of current component and its parent
         child_shape = component_shape(ct)
@@ -308,17 +542,17 @@
         # 5. 0.25*Parent's area < Child's area < 0.45*Parent's area
         # 6. Child is black, and Parent is white
         # 7. Center of Child and center of parent should overlap
-        if prt_shape["width"] * 0.56 < child_shape["width"] \
-                < prt_shape["width"] * 0.76 \
-                and prt_shape["height"] * 0.56 < child_shape["height"] \
-                < prt_shape["height"] * 0.76 \
-                and child_shape["width"] > 0.1 * size[1] \
-                and child_shape["height"] > 0.1 * size[0] \
-                and 0.30 * prt_area < child_area < 0.50 * prt_area \
-                and img_bw[child_shape["cty"]][child_shape["ctx"]] == 0 \
-                and img_bw[child_shape["top"]][child_shape["left"]] == 255 \
-                and dist_x < 0.1 * child_shape["width"] \
-                and dist_y < 0.1 * child_shape["height"]:
+        if (prt_shape["width"] * 0.56 < child_shape["width"]
+                    < prt_shape["width"] * 0.76
+                    and prt_shape["height"] * 0.56 < child_shape["height"]
+                    < prt_shape["height"] * 0.76
+                    and child_shape["width"] > 0.1 * size[1]
+                    and child_shape["height"] > 0.1 * size[0]
+                    and 0.30 * prt_area < child_area < 0.50 * prt_area
+                    and img_bw[child_shape["cty"]][child_shape["ctx"]] == 0
+                    and img_bw[child_shape["top"]][child_shape["left"]] == 255
+                    and dist_x < 0.1 * child_shape["width"]
+                    and dist_y < 0.1 * child_shape["height"]):
             # If raw capture is not available, check the camera is placed right
             # in front of the test page:
             # 1. Distances between parent and child horizontally on both sides,
@@ -330,13 +564,11 @@
                 dist_right = prt_shape["right"] - child_shape["right"]
                 dist_top = child_shape["top"] - prt_shape["top"]
                 dist_bottom = prt_shape["bottom"] - child_shape["bottom"]
-                if abs(dist_left-dist_right) > 0.05 * child_shape["width"] or \
-                        abs(dist_top-dist_bottom) > \
-                        0.05 * child_shape["height"]:
+                if (abs(dist_left-dist_right) > 0.05 * child_shape["width"]
+                            or abs(dist_top-dist_bottom) > 0.05 * child_shape["height"]):
                     continue
             # Calculate aspect ratio
-            aspect_ratio = float(child_shape["width"]) / \
-                           float(child_shape["height"])
+            aspect_ratio = float(child_shape["width"]) / child_shape["height"]
             circle_ctx = child_shape["ctx"]
             circle_cty = child_shape["cty"]
             circle_w = float(child_shape["width"])
@@ -350,15 +582,15 @@
 
     if num_circle == 0:
         its.image.write_image(img/255, img_name, True)
-        print "No black circle was detected. Please take pictures according " \
-              "to instruction carefully!\n"
-        assert (num_circle == 1)
+        print "No black circle was detected. Please take pictures according",
+        print "to instruction carefully!\n"
+        assert num_circle == 1
 
     if num_circle > 1:
         its.image.write_image(img/255, img_name, True)
-        print "More than one black circle was detected. Background of scene " \
-              "may be too complex.\n"
-        assert (num_circle == 1)
+        print "More than one black circle was detected. Background of scene",
+        print "may be too complex.\n"
+        assert num_circle == 1
 
     # draw circle center and image center, and save the image
     line_width = max(1, max(size)/500)
@@ -395,13 +627,13 @@
         its.image.write_image(img/255, img_name, True)
 
     print "Aspect ratio: %.3f" % aspect_ratio
-    print "Circle center position regarding to image center: %.3fx%.3f" % \
-            (cc_ct["vert"], cc_ct["hori"])
+    print "Circle center position wrt to image center:",
+    print "%.3fx%.3f" % (cc_ct["vert"], cc_ct["hori"])
     return aspect_ratio, cc_ct, (circle_w, circle_h)
 
 
 def component_shape(contour):
-    """ Measure the shape for a connected component in the aspect ratio test
+    """Measure the shape for a connected component in the aspect ratio test.
 
     Args:
         contour: return from cv2.findContours. A list of pixel coordinates of
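
To make the new FoV-coverage check above concrete, here is a small standalone sketch (not part of the patch) that mirrors convert_ar_to_float() and calc_circle_image_ratio() from the test and works through one set of made-up numbers: a 4000x3000 (4:3) reference frame whose circle has a mean diameter of 1000 pixels, compared against a hypothetical 16:9 stream cropped from the same 4:3 sensor.

import math
import numpy as np


def convert_ar_to_float(ar_string):
    # Same math as the helper added above: "16:9" -> 16.0/9.0
    num, den = [float(x) for x in ar_string.split(":")]
    return num / den


def calc_circle_image_ratio(circle_w, circle_h, image_w, image_h):
    # Percent of the image area covered by the detected circle
    circle_area = math.pi * math.pow(np.mean([circle_w, circle_h]) / 2.0, 2)
    return 100 * circle_area / (image_w * image_h)


# Hypothetical 4:3 RAW reference: 4000x3000 with a ~1000 px circle diameter
ref_percent = calc_circle_image_ratio(1000, 1000, 4000, 3000)   # ~6.5%
# A 16:9 stream crops that 4:3 sensor vertically, so the same circle covers
# a larger fraction of the frame; the expected scale factor is
scale_16_9 = convert_ar_to_float("16:9") / convert_ar_to_float("4:3")  # ~1.33
print("ref %.1f%%, expected 16:9 %.1f%%" % (ref_percent,
                                            ref_percent * scale_16_9))

In the test itself this scaled value plays the role of chk_percent = ref_fov["percent"] * ar_scaling[ar_check], and each capture's measured coverage must match it within FOV_PERCENT_RTOL.
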
diff --git a/apps/CameraITS/tests/scene4/test_multi_camera_alignment.py b/apps/CameraITS/tests/scene4/test_multi_camera_alignment.py
new file mode 100644
index 0000000..4c3c4d9
--- /dev/null
+++ b/apps/CameraITS/tests/scene4/test_multi_camera_alignment.py
@@ -0,0 +1,450 @@
+# Copyright 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import math
+import os.path
+import re
+import sys
+import cv2
+
+import its.caps
+import its.device
+import its.image
+import its.objects
+
+import numpy as np
+
+ALIGN_TOL_MM = 4.0E-3  # mm
+ALIGN_TOL = 0.01  # multiplied by sensor diagonal to convert to pixels
+CHART_DISTANCE_CM = 22  # cm
+CIRCLE_RTOL = 0.1
+GYRO_REFERENCE = 1
+NAME = os.path.basename(__file__).split('.')[0]
+TRANS_REF_MATRIX = np.array([0, 0, 0])
+
+
+def convert_to_world_coordinates(x, y, r, t, k, z_w):
+    """Convert x,y coordinates to world coordinates.
+
+    Conversion equation is:
+    A = [[x*r[2][0] - dot(k_row0, r_col0), x*r[2][1] - dot(k_row0, r_col1)],
+         [y*r[2][0] - dot(k_row1, r_col0), y*r[2][1] - dot(k_row1, r_col1)]]
+    b = [[z_w*dot(k_row0, r_col2) + dot(k_row0, t) - x*(r[2][2]*z_w + t[2])],
+         [z_w*dot(k_row1, r_col2) + dot(k_row1, t) - y*(r[2][2]*z_w + t[2])]]
+
+    [[x_w], [y_w]] = inv(A) * b
+
+    Args:
+        x:      x location in pixel space
+        y:      y location in pixel space
+        r:      rotation matrix
+        t:      translation matrix
+        k:      intrinsic matrix
+        z_w:    z distance in world space
+
+    Returns:
+        x_w:    x in meters in world space
+        y_w:    y in meters in world space
+    """
+    c_1 = r[2, 2] * z_w + t[2]
+    k_x1 = np.dot(k[0, :], r[:, 0])
+    k_x2 = np.dot(k[0, :], r[:, 1])
+    k_x3 = z_w * np.dot(k[0, :], r[:, 2]) + np.dot(k[0, :], t)
+    k_y1 = np.dot(k[1, :], r[:, 0])
+    k_y2 = np.dot(k[1, :], r[:, 1])
+    k_y3 = z_w * np.dot(k[1, :], r[:, 2]) + np.dot(k[1, :], t)
+
+    a = np.array([[x*r[2][0]-k_x1, x*r[2][1]-k_x2],
+                  [y*r[2][0]-k_y1, y*r[2][1]-k_y2]])
+    b = np.array([[k_x3-x*c_1], [k_y3-y*c_1]])
+    return np.dot(np.linalg.inv(a), b)
+
+
+def convert_to_image_coordinates(p_w, r, t, k):
+    p_c = np.dot(r, p_w) + t
+    p_h = np.dot(k, p_c)
+    return p_h[0] / p_h[2], p_h[1] / p_h[2]
+
+
+def rotation_matrix(rotation):
+    """Convert the rotation parameters to 3-axis data.
+
+    Args:
+        rotation:   android.lens.poseRotation vector
+    Returns:
+        3x3 matrix w/ rotation parameters
+    """
+    x = rotation[0]
+    y = rotation[1]
+    z = rotation[2]
+    w = rotation[3]
+    return np.array([[1-2*y**2-2*z**2, 2*x*y-2*z*w, 2*x*z+2*y*w],
+                     [2*x*y+2*z*w, 1-2*x**2-2*z**2, 2*y*z-2*x*w],
+                     [2*x*z-2*y*w, 2*y*z+2*x*w, 1-2*x**2-2*y**2]])
+
+
+# TODO: merge find_circle() & test_aspect_ratio_and_crop.measure_aspect_ratio()
+# for a unified circle script that lives in pymodules/image.py
+def find_circle(gray, name):
+    """Find the black circle in the image.
+
+    Args:
+        gray:           numpy grayscale array with pixel values in [0,255].
+        name:           string of file name.
+    Returns:
+        circle:         (circle_center_x, circle_center_y, radius)
+    """
+    size = gray.shape
+    # otsu threshold to binarize the image
+    _, img_bw = cv2.threshold(np.uint8(gray), 0, 255,
+                              cv2.THRESH_BINARY + cv2.THRESH_OTSU)
+
+    # connected component
+    cv2_version = cv2.__version__
+    if cv2_version.startswith('2.4.'):
+        contours, hierarchy = cv2.findContours(255-img_bw, cv2.RETR_TREE,
+                                               cv2.CHAIN_APPROX_SIMPLE)
+    elif cv2_version.startswith('3.2.'):
+        _, contours, hierarchy = cv2.findContours(255-img_bw, cv2.RETR_TREE,
+                                                  cv2.CHAIN_APPROX_SIMPLE)
+
+    # Check each component and find the black circle
+    min_cmpt = size[0] * size[1] * 0.005
+    max_cmpt = size[0] * size[1] * 0.35
+    num_circle = 0
+    for ct, hrch in zip(contours, hierarchy[0]):
+        # The radius of the circle is 1/3 of the length of the square, meaning
+        # around 1/3 of the area of the square
+        # Parental component should exist and the area is acceptable.
+        # The contour of a circle should have at least 5 points
+        child_area = cv2.contourArea(ct)
+        if (hrch[3] == -1 or child_area < min_cmpt or child_area > max_cmpt
+                    or len(ct) < 15):
+            continue
+        # Check the shapes of current component and its parent
+        child_shape = component_shape(ct)
+        parent = hrch[3]
+        prt_shape = component_shape(contours[parent])
+        prt_area = cv2.contourArea(contours[parent])
+        dist_x = abs(child_shape['ctx']-prt_shape['ctx'])
+        dist_y = abs(child_shape['cty']-prt_shape['cty'])
+        # 1. 0.56*Parent's width < Child's width < 0.76*Parent's width.
+        # 2. 0.56*Parent's height < Child's height < 0.76*Parent's height.
+        # 3. Child's width > 0.1*Image width
+        # 4. Child's height > 0.1*Image height
+        # 5. 0.25*Parent's area < Child's area < 0.45*Parent's area
+        # 6. Child is black, and Parent is white
+        # 7. Center of Child and center of parent should overlap
+        if (prt_shape['width'] * 0.56 < child_shape['width']
+                    < prt_shape['width'] * 0.76
+                    and prt_shape['height'] * 0.56 < child_shape['height']
+                    < prt_shape['height'] * 0.76
+                    and child_shape['width'] > 0.1 * size[1]
+                    and child_shape['height'] > 0.1 * size[0]
+                    and 0.30 * prt_area < child_area < 0.50 * prt_area
+                    and img_bw[child_shape['cty']][child_shape['ctx']] == 0
+                    and img_bw[child_shape['top']][child_shape['left']] == 255
+                    and dist_x < 0.1 * child_shape['width']
+                    and dist_y < 0.1 * child_shape['height']):
+            # Calculate circle center and size
+            circle_ctx = float(child_shape['ctx'])
+            circle_cty = float(child_shape['cty'])
+            circle_w = float(child_shape['width'])
+            circle_h = float(child_shape['height'])
+            num_circle += 1
+            # If more than one circle found, break
+            if num_circle == 2:
+                break
+    its.image.write_image(gray[..., np.newaxis]/255.0, name)
+
+    if num_circle == 0:
+        print 'No black circle was detected. Please take pictures according',
+        print 'to the instructions carefully!\n'
+        assert num_circle == 1
+
+    if num_circle > 1:
+        print 'More than one black circle was detected. Background of scene',
+        print 'may be too complex.\n'
+        assert num_circle == 1
+    return (circle_ctx, circle_cty, (circle_w+circle_h)/4.0)
+
+
+def component_shape(contour):
+    """Measure the shape of a connected component.
+
+    Args:
+        contour: return from cv2.findContours. A list of pixel coordinates of
+        the contour.
+
+    Returns:
+        The most left, right, top, bottom pixel location, height, width, and
+        the center pixel location of the contour.
+    """
+    shape = {'left': np.inf, 'right': 0, 'top': np.inf, 'bottom': 0,
+             'width': 0, 'height': 0, 'ctx': 0, 'cty': 0}
+    for pt in contour:
+        if pt[0][0] < shape['left']:
+            shape['left'] = pt[0][0]
+        if pt[0][0] > shape['right']:
+            shape['right'] = pt[0][0]
+        if pt[0][1] < shape['top']:
+            shape['top'] = pt[0][1]
+        if pt[0][1] > shape['bottom']:
+            shape['bottom'] = pt[0][1]
+    shape['width'] = shape['right'] - shape['left'] + 1
+    shape['height'] = shape['bottom'] - shape['top'] + 1
+    shape['ctx'] = (shape['left']+shape['right'])/2
+    shape['cty'] = (shape['top']+shape['bottom'])/2
+    return shape
+
+
+def define_reference_camera(pose_reference, cam_reference):
+    """Determine the reference camera.
+
+    Args:
+        pose_reference: 0 for cameras, 1 for gyro
+        cam_reference:  dict with key of physical camera and value True/False
+    Returns:
+        i_ref:          physical id of reference camera
+        i_2nd:          physical id of secondary camera
+    """
+
+    if pose_reference == GYRO_REFERENCE:
+        print 'pose_reference is GYRO'
+        i_ref = list(cam_reference.keys())[0]  # pick first camera as ref
+        i_2nd = list(cam_reference.keys())[1]
+    else:
+        print 'pose_reference is CAMERA'
+        i_ref = (k for (k, v) in cam_reference.iteritems() if v).next()
+        i_2nd = (k for (k, v) in cam_reference.iteritems() if not v).next()
+    return i_ref, i_2nd
+
+
+def main():
+    """Test the multi camera system parameters related to camera spacing.
+
+    Using the multi-camera physical cameras, take a picture of scene4
+    (a black circle and surrounding square on a white background) with
+    one of the physical cameras. Then find the circle center. Using the
+    parameters:
+        android.lens.poseReference
+        android.lens.poseTranslation
+        android.lens.poseRotation
+        android.lens.intrinsicCalibration
+        android.lens.distortion (if available)
+    project the circle center to the world coordinates for each camera.
+    Compare the difference between the two cameras' circle centers in
+    world coordinates.
+
+    Reproject the world coordinates back to pixel coordinates and compare
+    against originals as a sanity check.
+
+    Compare the circle sizes if the focal lengths of the cameras are
+    different using
+        android.lens.info.availableFocalLengths.
+    """
+    chart_distance = CHART_DISTANCE_CM
+    for s in sys.argv[1:]:
+        if s[:5] == 'dist=' and len(s) > 5:
+            chart_distance = float(re.sub('cm', '', s[5:]))
+            print 'Using chart distance: %.1fcm' % chart_distance
+    chart_distance *= 1.0E-2
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.compute_target_exposure(props) and
+                             its.caps.per_frame_control(props) and
+                             its.caps.logical_multi_camera(props) and
+                             its.caps.raw16(props) and
+                             its.caps.manual_sensor(props))
+        debug = its.caps.debug_mode()
+        avail_fls = props['android.lens.info.availableFocalLengths']
+        pose_reference = props['android.lens.poseReference']
+
+        max_raw_size = its.objects.get_available_output_sizes('raw', props)[0]
+        w, h = its.objects.get_available_output_sizes(
+                'yuv', props, match_ar_size=max_raw_size)[0]
+
+        # Do 3A and get the values
+        s, e, _, _, fd = cam.do_3a(get_results=True,
+                                   lock_ae=True, lock_awb=True)
+        e *= 2  # brighten RAW images
+        req = its.objects.manual_capture_request(s, e, fd, True, props)
+
+        # get physical camera properties
+        ids = its.caps.logical_multi_camera_physical_ids(props)
+        props_physical = {}
+        for i in ids:
+            props_physical[i] = cam.get_camera_properties_by_id(i)
+
+        # capture RAWs of 1st 2 cameras
+        cap_raw = {}
+        out_surfaces = [{'format': 'yuv', 'width': w, 'height': h},
+                        {'format': 'raw', 'physicalCamera': ids[0]},
+                        {'format': 'raw', 'physicalCamera': ids[1]}]
+        _, cap_raw[ids[0]], cap_raw[ids[1]] = cam.do_capture(req, out_surfaces)
+
+    size_raw = {}
+    k = {}
+    cam_reference = {}
+    r = {}
+    t = {}
+    circle = {}
+    fl = {}
+    sensor_diag = {}
+    for i in ids:
+        print 'Camera %s' % i
+        # process image
+        img_raw = its.image.convert_capture_to_rgb_image(
+                cap_raw[i], props=props)
+        size_raw[i] = (cap_raw[i]['width'], cap_raw[i]['height'])
+
+        # save images if debug
+        if debug:
+            its.image.write_image(img_raw, '%s_raw_%s.jpg' % (NAME, i))
+
+        # convert to [0, 255] images
+        img_raw *= 255
+
+        # scale to match calibration data
+        img = cv2.resize(img_raw.astype(np.uint8), None, fx=2, fy=2)
+
+        # load parameters for each physical camera
+        ical = props_physical[i]['android.lens.intrinsicCalibration']
+        assert len(ical) == 5, 'android.lens.intrinsicCalibration incorrect.'
+        k[i] = np.array([[ical[0], ical[4], ical[2]],
+                         [0, ical[1], ical[3]],
+                         [0, 0, 1]])
+        print ' k:', k[i]
+
+        rotation = np.array(props_physical[i]['android.lens.poseRotation'])
+        print ' rotation:', rotation
+        assert len(rotation) == 4, 'poseRotation has wrong # of params.'
+        r[i] = rotation_matrix(rotation)
+
+        t[i] = np.array(props_physical[i]['android.lens.poseTranslation'])
+        print ' translation:', t[i]
+        assert len(t[i]) == 3, 'poseTranslation has wrong # of params.'
+        if (t[i] == TRANS_REF_MATRIX).all():
+            cam_reference[i] = True
+        else:
+            cam_reference[i] = False
+
+        # API spec defines poseTranslation as the world coordinate p_w_cam of
+        # optics center. When applying [R|t] to go from world coordinates to
+        # camera coordinates, we need -R*p_w_cam of the coordinate reported in
+        # metadata.
+        # ie. for a camera with optical center at world coordinate (5, 4, 3)
+        # and identity rotation, to convert a world coordinate into the
+        # camera's coordinate, we need a translation vector of [-5, -4, -3]
+        # so that: [I|[-5, -4, -3]^T] * [5, 4, 3]^T = [0,0,0]^T
+        t[i] = -1.0 * np.dot(r[i], t[i])
+        if debug:
+            print 't:', t[i]
+            print 'r:', r[i]
+
+        # Do operation on distorted image
+        print 'Detecting pre-correction circle'
+        circle_distorted = find_circle(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY),
+                                       '%s_gray_precorr_cam_%s.jpg' % (NAME, i))
+        print 'camera %s circle pre-distortion correction: x, y: %.2f, %.2f' % (
+                i, circle_distorted[0], circle_distorted[1])
+
+        # Apply correction to image (if available)
+        if its.caps.distortion_correction(props):
+            distort = np.array(props_physical[i]['android.lens.distortion'])
+            assert len(distort) == 5, 'distortion has wrong # of params.'
+            cv2_distort = np.array([distort[0], distort[1],
+                                    distort[3], distort[4],
+                                    distort[2]])
+            print ' cv2 distortion params:', cv2_distort
+            its.image.write_image(img/255.0, '%s_raw_%s.jpg' % (
+                    NAME, i))
+            img = cv2.undistort(img, k[i], cv2_distort)
+            its.image.write_image(img/255.0, '%s_correct_%s.jpg' % (
+                    NAME, i))
+
+        # Find the circles in grayscale image
+        circle[i] = find_circle(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY),
+                                '%s_gray_%s.jpg' % (NAME, i))
+
+        # Find focal length & sensor size
+        fl[i] = props_physical[i]['android.lens.info.availableFocalLengths'][0]
+        sensor_diag[i] = math.sqrt(size_raw[i][0] ** 2 + size_raw[i][1] ** 2)
+
+    i_ref, i_2nd = define_reference_camera(pose_reference, cam_reference)
+    print 'reference camera: %s, secondary camera: %s' % (i_ref, i_2nd)
+
+    # Convert circle centers to real world coordinates
+    x_w = {}
+    y_w = {}
+    if props['android.lens.facing']:
+        print 'lens facing BACK'
+        chart_distance *= -1  # API spec defines +z as pointing out from the screen
+    for i in [i_ref, i_2nd]:
+        x_w[i], y_w[i] = convert_to_world_coordinates(
+                circle[i][0], circle[i][1], r[i], t[i], k[i], chart_distance)
+
+    # Back convert to image coordinates for sanity check
+    x_p = {}
+    y_p = {}
+    x_p[i_2nd], y_p[i_2nd] = convert_to_image_coordinates(
+            [x_w[i_ref], y_w[i_ref], chart_distance],
+            r[i_2nd], t[i_2nd], k[i_2nd])
+    x_p[i_ref], y_p[i_ref] = convert_to_image_coordinates(
+            [x_w[i_2nd], y_w[i_2nd], chart_distance],
+            r[i_ref], t[i_ref], k[i_ref])
+
+    # Summarize results
+    for i in [i_ref, i_2nd]:
+        print ' Camera: %s' % i
+        print ' x, y (pixels): %.1f, %.1f' % (circle[i][0], circle[i][1])
+        print ' x_w, y_w (mm): %.2f, %.2f' % (x_w[i]*1.0E3, y_w[i]*1.0E3)
+        print ' x_p, y_p (pixels): %.1f, %.1f' % (x_p[i], y_p[i])
+
+    # Check center locations
+    err = np.linalg.norm(np.array([x_w[i_ref], y_w[i_ref]]) -
+                         np.array([x_w[i_2nd], y_w[i_2nd]]))
+    print '\nCenter location err (mm): %.2f' % (err*1E3)
+    msg = 'Center locations %s <-> %s too different!' % (i_ref, i_2nd)
+    msg += ' val=%.2fmm, THRESH=%.fmm' % (err*1E3, ALIGN_TOL_MM*1E3)
+    assert err < ALIGN_TOL_MM, msg
+
+    # Check projections back into pixel space
+    for i in [i_ref, i_2nd]:
+        err = np.linalg.norm(np.array([circle[i][0], circle[i][1]]) -
+                             np.array([x_p[i], y_p[i]]))
+        print 'Camera %s projection error (pixels): %.1f' % (i, err)
+        tol = ALIGN_TOL * sensor_diag[i]
+        msg = 'Camera %s project locations too different!' % i
+        msg += ' diff=%.2f, TOL=%.2f' % (err, tol)
+        assert err < tol, msg
+
+    # Check focal length and circle size if more than 1 focal length
+    if len(avail_fls) > 1:
+        print 'Circle radii (pixels); ref: %.1f, 2nd: %.1f' % (
+                circle[i_ref][2], circle[i_2nd][2])
+        print 'Focal lengths (mm); ref: %.2f, 2nd: %.2f' % (
+                fl[i_ref], fl[i_2nd])
+        print 'Sensor diagonals (pixels); ref: %.2f, 2nd: %.2f' % (
+                sensor_diag[i_ref], sensor_diag[i_2nd])
+        msg = 'Circle size scales improperly! RTOL=%.1f' % CIRCLE_RTOL
+        msg += '\nMetric: radius/focal_length*sensor_diag should be equal.'
+        assert np.isclose(circle[i_ref][2]/fl[i_ref]*sensor_diag[i_ref],
+                          circle[i_2nd][2]/fl[i_2nd]*sensor_diag[i_2nd],
+                          rtol=CIRCLE_RTOL), msg
+
+
+if __name__ == '__main__':
+    main()
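
As a quick sanity check of the convert_to_world_coordinates() math above, the sketch below (standalone, not part of the patch) re-implements the same linear solve and feeds it an identity rotation, zero translation, and a made-up intrinsic matrix; in that degenerate case the result should reduce to the familiar pinhole back-projection x_w = z_w * (x - c_x) / f_x.

import numpy as np


def convert_to_world_coordinates(x, y, r, t, k, z_w):
    # Same linear solve as the helper in the new test above
    c_1 = r[2, 2] * z_w + t[2]
    k_x1 = np.dot(k[0, :], r[:, 0])
    k_x2 = np.dot(k[0, :], r[:, 1])
    k_x3 = z_w * np.dot(k[0, :], r[:, 2]) + np.dot(k[0, :], t)
    k_y1 = np.dot(k[1, :], r[:, 0])
    k_y2 = np.dot(k[1, :], r[:, 1])
    k_y3 = z_w * np.dot(k[1, :], r[:, 2]) + np.dot(k[1, :], t)
    a = np.array([[x*r[2][0]-k_x1, x*r[2][1]-k_x2],
                  [y*r[2][0]-k_y1, y*r[2][1]-k_y2]])
    b = np.array([[k_x3-x*c_1], [k_y3-y*c_1]])
    return np.dot(np.linalg.inv(a), b)


# Made-up intrinsics: f = 1000 px, principal point (320, 240), no skew
k = np.array([[1000.0, 0.0, 320.0],
              [0.0, 1000.0, 240.0],
              [0.0, 0.0, 1.0]])
r = np.eye(3)        # identity rotation
t = np.zeros(3)      # camera optical center at the world origin
z_w = 0.22           # chart at 22 cm, as in CHART_DISTANCE_CM

# A pixel 100 px right of the principal point should come out at
# x_w = z_w * (x - c_x) / f_x = 0.22 * 100 / 1000 = 0.022 m, y_w = 0
xy_w = convert_to_world_coordinates(420.0, 240.0, r, t, k, z_w)
print("x_w = %.4f m, y_w = %.4f m" % (float(xy_w[0]), float(xy_w[1])))
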
diff --git a/apps/CameraITS/tests/sensor_fusion/test_multi_camera_frame_sync.py b/apps/CameraITS/tests/sensor_fusion/test_multi_camera_frame_sync.py
new file mode 100644
index 0000000..63ddbdd
--- /dev/null
+++ b/apps/CameraITS/tests/sensor_fusion/test_multi_camera_frame_sync.py
@@ -0,0 +1,186 @@
+# Copyright 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.caps
+from its.cv2image import get_angle
+import its.device
+import its.image
+import its.objects
+import its.target
+
+import cv2
+import matplotlib
+from matplotlib import pylab
+import numpy
+import os
+
+ANGLE_MASK = 10  # degrees
+ANGULAR_DIFF_THRESHOLD = 10  # degrees
+ANGULAR_MOVEMENT_THRESHOLD = 35  # degrees
+NAME = os.path.basename(__file__).split(".")[0]
+NUM_CAPTURES = 100
+W = 640
+H = 480
+CHART_DISTANCE = 25  # cm
+CM_TO_M = 1/100.0
+
+
+def _check_available_capabilities(props):
+    """Returns True if all required test capabilities are present."""
+    return all([
+            its.caps.compute_target_exposure(props),
+            its.caps.per_frame_control(props),
+            its.caps.logical_multi_camera(props),
+            its.caps.raw16(props),
+            its.caps.manual_sensor(props),
+            its.caps.sensor_fusion(props)])
+
+
+def _assert_camera_movement(frame_pairs_angles):
+    """Assert the angles between each frame pair are sufficiently different.
+
+    Different angles is an indication of camera movement.
+    """
+    angles = [i for i, j in frame_pairs_angles]
+    max_angle = numpy.amax(angles)
+    min_angle = numpy.amin(angles)
+    emsg = "Not enough phone movement!\n"
+    emsg += "min angle: %.2f, max angle: %.2f deg, THRESH: %d deg" % (
+            min_angle, max_angle, ANGULAR_MOVEMENT_THRESHOLD)
+    assert max_angle - min_angle > ANGULAR_MOVEMENT_THRESHOLD, emsg
+
+
+def _assert_angular_difference(angle_1, angle_2):
+    """Assert angular difference is within threshold."""
+    diff = abs(angle_2 - angle_1)
+
+    # Assert difference is less than threshold
+    emsg = "Diff between frame pair: %.1f. Threshold: %d deg." % (
+            diff, ANGULAR_DIFF_THRESHOLD)
+    assert diff < ANGULAR_DIFF_THRESHOLD, emsg
+
+
+def _mask_angles_near_extremes(frame_pairs_angles):
+    """Mask out the data near the top and bottom of angle range."""
+    masked_pairs_angles = [[i, j] for i, j in frame_pairs_angles
+                           if ANGLE_MASK <= abs(i) <= 90-ANGLE_MASK and
+                                ANGLE_MASK <= abs(j) <= 90-ANGLE_MASK]
+    return masked_pairs_angles
+
+
+def _plot_frame_pairs_angles(frame_pairs_angles, ids):
+    """Plot the extracted angles."""
+    matplotlib.pyplot.figure("Camera Rotation Angle")
+    cam0_angles = [i for i, j in frame_pairs_angles]
+    cam1_angles = [j for i, j in frame_pairs_angles]
+    pylab.plot(range(len(cam0_angles)), cam0_angles, "r", label="%s" % ids[0])
+    pylab.plot(range(len(cam1_angles)), cam1_angles, "g", label="%s" % ids[1])
+    pylab.legend()
+    pylab.xlabel("Camera frame number")
+    pylab.ylabel("Rotation angle (degrees)")
+    matplotlib.pyplot.savefig("%s_angles_plot.png" % (NAME))
+
+    matplotlib.pyplot.figure("Angle Diffs")
+    angle_diffs = [j-i for i, j in frame_pairs_angles]
+    pylab.plot(range(len(angle_diffs)), angle_diffs, "b",
+               label="cam%s-%s" % (ids[1], ids[0]))
+    pylab.legend()
+    pylab.xlabel("Camera frame number")
+    pylab.ylabel("Rotation angle difference (degrees)")
+    matplotlib.pyplot.savefig("%s_angle_diffs_plot.png" % (NAME))
+
+def _collect_data():
+    """Returns list of pair of gray frames and camera ids used for captures."""
+    yuv_sizes = {}
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+
+        # If capabilities not present, skip.
+        its.caps.skip_unless(_check_available_capabilities(props))
+
+        # Determine return parameters
+        debug = its.caps.debug_mode()
+        ids = its.caps.logical_multi_camera_physical_ids(props)
+
+        # Define capture request
+        s, e, _, _, _ = cam.do_3a(get_results=True, do_af=False)
+        req = its.objects.manual_capture_request(s, e)
+        req["android.lens.focusDistance"] = 1 / (CHART_DISTANCE * CM_TO_M)
+
+        # capture YUVs
+        out_surfaces = [{"format": "yuv", "width": W, "height": H,
+                         "physicalCamera": ids[0]},
+                        {"format": "yuv", "width": W, "height": H,
+                         "physicalCamera": ids[1]}]
+
+        capture_1_list, capture_2_list = cam.do_capture(
+            [req]*NUM_CAPTURES, out_surfaces)
+
+        # Create list of capture pairs. [[cap1A, cap1B], [cap2A, cap2B], ...]
+        frame_pairs = zip(capture_1_list, capture_2_list)
+
+        # Convert captures to grayscale
+        frame_pairs_gray = [
+            [cv2.cvtColor(its.image.convert_capture_to_rgb_image(
+                    f, props=props), cv2.COLOR_RGB2GRAY) for f in pair]
+            for pair in frame_pairs]
+
+        # Save images for debugging
+        if debug:
+            for i, imgs in enumerate(frame_pairs_gray):
+                for j in [0, 1]:
+                    file_name = "%s_%s_%03d.png" % (NAME, ids[j], i)
+                    cv2.imwrite(file_name, imgs[j]*255)
+
+        return frame_pairs_gray, ids
+
+def main():
+    """Test frame timestamps captured by logical camera are within 10ms."""
+    frame_pairs_gray, ids = _collect_data()
+
+    # Compute angles in frame pairs
+    frame_pairs_angles = [
+            [get_angle(p[0]), get_angle(p[1])] for p in frame_pairs_gray]
+
+    # Remove frames where not enough squares were detected.
+    filtered_pairs_angles = []
+    for angle_1, angle_2 in frame_pairs_angles:
+        if angle_1 is None or angle_2 is None:
+            continue
+        filtered_pairs_angles.append([angle_1, angle_2])
+
+    print 'Using {} image pairs to compute angular difference.'.format(
+        len(filtered_pairs_angles))
+
+    assert len(filtered_pairs_angles) > 20, (
+        "Unable to identify enough frames with detected squares.")
+
+    # Mask out data near 90 degrees.
+    # The chessboard angles we compute go from 0 to 89, meaning that
+    # 90 degrees is equivalent to 0 degrees.
+    # In order to avoid this jump, we ignore any frames at these extremities.
+    masked_pairs_angles = _mask_angles_near_extremes(filtered_pairs_angles)
+
+    # Plot angles and differences
+    _plot_frame_pairs_angles(filtered_pairs_angles, ids)
+
+    # Ensure camera moved
+    _assert_camera_movement(filtered_pairs_angles)
+
+    # Ensure angle between images from each camera does not change appreciably
+    for cam_1_angle, cam_2_angle in masked_pairs_angles:
+        _assert_angular_difference(cam_1_angle, cam_2_angle)
+
+if __name__ == "__main__":
+    main()
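
The masking step in _mask_angles_near_extremes() above exists because the measured chessboard angle wraps at 90 degrees, so a pair like (88, 2) is really only about a 4 degree difference; a tiny standalone sketch (not part of the patch, with made-up angles) shows which pairs survive the mask.

ANGLE_MASK = 10  # degrees, same value as the constant above
frame_pairs_angles = [[5, 6], [45, 44], [88, 2], [30, 31]]  # made-up degrees
masked = [[i, j] for i, j in frame_pairs_angles
          if ANGLE_MASK <= abs(i) <= 90 - ANGLE_MASK and
          ANGLE_MASK <= abs(j) <= 90 - ANGLE_MASK]
print(masked)  # [[45, 44], [30, 31]] -- pairs near the 0/90 wrap are dropped
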
diff --git a/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py b/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py
index 2e8f38e..fbf7bcd 100644
--- a/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py
+++ b/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py
@@ -116,7 +116,7 @@
             # Split by comma and convert each dimension to int.
             [w, h] = map(int, s[9:].split(","))
         elif s[:12] == "test_length=" and len(s) > 12:
-            test_length = int(s[12:])
+            test_length = float(s[12:])
 
     # Collect or load the camera+gyro data. All gyro events as well as camera
     # timestamps are in the "events" dictionary, and "frames" is a list of
@@ -345,7 +345,7 @@
         if num_features < MIN_FEATURE_PTS:
             print "Not enough feature points in frame", i
             print "Need at least %d features, got %d" % (
-                    MIN_FEATURE_PTS, num_features)
+                MIN_FEATURE_PTS, num_features)
             assert 0
         else:
             print "Number of features in frame %d is %d" % (i, num_features)
@@ -448,13 +448,17 @@
         fmt = {"format": "yuv", "width": w, "height": h}
         s, e, _, _, _ = cam.do_3a(get_results=True, do_af=False)
         req = its.objects.manual_capture_request(s, e)
-        fps = 30
+        its.objects.turn_slow_filters_off(props, req)
         req["android.lens.focusDistance"] = 1 / (CHART_DISTANCE * CM_TO_M)
         req["android.control.aeTargetFpsRange"] = [fps, fps]
         req["android.sensor.frameDuration"] = int(1000.0/fps * MSEC_TO_NSEC)
-        print "Capturing %dx%d with sens. %d, exp. time %.1fms" % (
-            w, h, s, e*NSEC_TO_MSEC)
-        caps = cam.do_capture([req]*fps*test_length, fmt)
+        print "Capturing %dx%d with sens. %d, exp. time %.1fms at %dfps" % (
+            w, h, s, e*NSEC_TO_MSEC, fps)
+        caps = cam.do_capture([req]*int(fps*test_length), fmt)
+
+        # Capture a few more gyro samples for use in
+        # get_best_alignment_offset.
+        time.sleep(0.2)
 
         # Get the gyro events.
         print "Reading out sensor events"
diff --git a/apps/CameraITS/tools/get_camera_ids.py b/apps/CameraITS/tools/get_camera_ids.py
deleted file mode 100644
index 010b046..0000000
--- a/apps/CameraITS/tools/get_camera_ids.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2015 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import its.device
-import its.objects
-import its.image
-
-def main():
-    """get camera ids and save it to disk.
-    """
-    out_path = ""
-    for s in sys.argv[1:]:
-        if s[:4] == "out=" and len(s) > 4:
-            out_path = s[4:]
-    # kind of weird we need to open a camera to get camera ids, but
-    # this is how ITS is working now.
-    with its.device.ItsSession() as cam:
-        camera_ids = cam.get_camera_ids()
-        if out_path != "":
-            with open(out_path, "w") as f:
-                for camera_id in camera_ids:
-                    f.write(camera_id + "\n")
-
-if __name__ == '__main__':
-    main()
diff --git a/apps/CameraITS/tools/load_scene.py b/apps/CameraITS/tools/load_scene.py
index 4e245f4..330b32f 100644
--- a/apps/CameraITS/tools/load_scene.py
+++ b/apps/CameraITS/tools/load_scene.py
@@ -18,16 +18,21 @@
 import sys
 import time
 
+import numpy as np
+
 
 def main():
     """Load charts on device and display."""
-    camera_id = -1
     scene = None
     for s in sys.argv[1:]:
         if s[:6] == 'scene=' and len(s) > 6:
             scene = s[6:]
         elif s[:7] == 'screen=' and len(s) > 7:
             screen_id = s[7:]
+        elif s[:5] == 'dist=' and len(s) > 5:
+            chart_distance = float(re.sub('cm', '', s[5:]))
+        elif s[:4] == 'fov=' and len(s) > 4:
+            camera_fov = float(s[4:])
 
     cmd = ('adb -s %s shell am force-stop com.google.android.apps.docs' %
            screen_id)
@@ -43,8 +48,13 @@
 
     remote_scene_file = '/sdcard/Download/%s.pdf' % scene
     local_scene_file = os.path.join(os.environ['CAMERA_ITS_TOP'], 'tests',
-                                    scene, scene+'.pdf')
-    print 'Loading %s on %s' % (remote_scene_file, screen_id)
+                                    scene)
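+    # Use the 2/3-scaled chart PDF for narrow FoV cameras at ~20cm distance.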
+    if np.isclose(chart_distance, 20, rtol=0.1) and camera_fov < 90:
+        local_scene_file = os.path.join(local_scene_file,
+                                        scene+'_0.67_scaled.pdf')
+    else:
+        local_scene_file = os.path.join(local_scene_file, scene+'.pdf')
+    print 'Loading %s on %s' % (local_scene_file, screen_id)
     cmd = 'adb -s %s push %s /mnt%s' % (screen_id, local_scene_file,
                                         remote_scene_file)
     subprocess.Popen(cmd.split())
diff --git a/apps/CameraITS/tools/run_all_tests.py b/apps/CameraITS/tools/run_all_tests.py
index 47f7296..b6fdde2 100644
--- a/apps/CameraITS/tools/run_all_tests.py
+++ b/apps/CameraITS/tools/run_all_tests.py
@@ -13,21 +13,86 @@
 # limitations under the License.
 
 import copy
+import math
 import os
 import os.path
-import tempfile
+import re
 import subprocess
-import time
 import sys
+import tempfile
+import time
 
 import its.caps
+import its.cv2image
 import its.device
 from its.device import ItsSession
+import its.image
+import its.objects
+
+import numpy as np
 
 CHART_DELAY = 1  # seconds
+CHART_DISTANCE = 30.0  # cm
+CHART_HEIGHT = 13.5  # cm
+CHART_SCALE_START = 0.65
+CHART_SCALE_STOP = 1.35
+CHART_SCALE_STEP = 0.025
 FACING_EXTERNAL = 2
 NUM_TRYS = 2
+SCENE3_FILE = os.path.join(os.environ["CAMERA_ITS_TOP"], "pymodules", "its",
+                           "test_images", "ISO12233.png")
 SKIP_RET_CODE = 101  # note this must be same as tests/scene*/test_*
+VGA_HEIGHT = 480
+VGA_WIDTH = 640
+
+# Not yet mandated tests
+NOT_YET_MANDATED = {
+        "scene0": [
+                "test_jitter",
+                "test_burst_capture",
+                "test_test_patterns"
+        ],
+        "scene1": [
+                "test_ae_af",
+                "test_ae_precapture_trigger",
+                "test_crop_region_raw",
+                "test_ev_compensation_advanced",
+                "test_ev_compensation_basic",
+                "test_yuv_plus_jpeg"
+        ],
+        "scene2": [
+                "test_num_faces"
+        ],
+        "scene3": [
+                "test_flip_mirror",
+                "test_lens_movement_reporting",
+                "test_lens_position"
+        ],
+        "scene4": [],
+        "scene5": [],
+        "sensor_fusion": []
+}
+
+
+def calc_camera_fov(camera_id):
+    """Determine the camera field of view from internal params."""
+    with ItsSession(camera_id) as cam:
+        props = cam.get_camera_properties()
+        focal_ls = props['android.lens.info.availableFocalLengths']
+        if len(focal_ls) > 1:
+            print 'Doing capture to determine logical camera focal length'
+            cap = cam.do_capture(its.objects.auto_capture_request())
+            focal_l = cap['metadata']['android.lens.focalLength']
+        else:
+            focal_l = focal_ls[0]
+    sensor_size = props['android.sensor.info.physicalSize']
+    diag = math.sqrt(sensor_size['height'] ** 2 +
+                     sensor_size['width'] ** 2)
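+    # Diagonal FoV = 2 * atan(diag / (2 * focal_l)), in degrees.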
+    try:
+        fov = str(round(2 * math.degrees(math.atan(diag / (2 * focal_l))), 2))
+    except (ValueError, ZeroDivisionError):
+        fov = str(0)
+    print 'Calculated FoV: %s' % fov
+    return fov
 
 
 def evaluate_socket_failure(err_file_path):
@@ -36,16 +101,17 @@
     with open(err_file_path, 'r') as ferr:
         for line in ferr:
             if (line.find('socket.error') != -1 or
+                line.find('socket.timeout') != -1 or
                 line.find('Problem with socket') != -1):
                 socket_fail = True
     return socket_fail
 
 
-def skip_sensor_fusion():
+def skip_sensor_fusion(camera_id):
     """Determine if sensor fusion test is skipped for this camera."""
 
     skip_code = SKIP_RET_CODE
-    with ItsSession() as cam:
+    with ItsSession(camera_id) as cam:
         props = cam.get_camera_properties()
         if (its.caps.sensor_fusion(props) and its.caps.manual_sensor(props) and
                 props['android.lens.facing'] is not FACING_EXTERNAL):
@@ -79,35 +145,9 @@
         tmp_dir: location of temp directory for output files
         skip_scene_validation: force skip scene validation. Used when test scene
                  is setup up front and don't require tester validation.
+        dist:    [Experimental] chart distance in cm.
     """
 
-    # Not yet mandated tests
-    NOT_YET_MANDATED = {
-        "scene0": [
-            "test_jitter",
-            "test_burst_capture"
-            ],
-        "scene1": [
-            "test_ae_af",
-            "test_ae_precapture_trigger",
-            "test_crop_region_raw",
-            "test_ev_compensation_advanced",
-            "test_ev_compensation_basic",
-            "test_yuv_plus_jpeg"
-            ],
-        "scene2": [
-            "test_num_faces",
-            ],
-        "scene3": [
-            "test_3a_consistency",
-            "test_lens_movement_reporting",
-            "test_lens_position"
-            ],
-        "scene4": [],
-        "scene5": [],
-        "sensor_fusion": []
-    }
-
     all_scenes = ["scene0", "scene1", "scene2", "scene3", "scene4", "scene5",
                   "sensor_fusion"]
 
@@ -139,6 +179,8 @@
     rot_rig_id = None
     tmp_dir = None
     skip_scene_validation = False
+    chart_distance = CHART_DISTANCE
+
     for s in sys.argv[1:]:
         if s[:7] == "camera=" and len(s) > 7:
             camera_ids = s[7:].split(',')
@@ -155,7 +197,10 @@
             tmp_dir = s[8:]
         elif s == 'skip_scene_validation':
             skip_scene_validation = True
+        elif s[:5] == 'dist=' and len(s) > 5:
+            chart_distance = float(re.sub('cm', '', s[5:]))
 
+    chart_dist_arg = 'dist= ' + str(chart_distance)
     auto_scene_switch = chart_host_id is not None
     merge_result_switch = result_device_id is not None
 
@@ -183,7 +228,7 @@
                     break
 
         if not valid_scenes:
-            print "Unknown scene specifiied:", s
+            print 'Unknown scene specified:', s
             assert False
         scenes = temp_scenes
 
@@ -218,16 +263,8 @@
 
     # user doesn't specify camera id, run through all cameras
     if not camera_ids:
-        camera_ids_path = os.path.join(topdir, "camera_ids.txt")
-        out_arg = "out=" + camera_ids_path
-        cmd = ['python',
-               os.path.join(os.getcwd(), "tools/get_camera_ids.py"), out_arg,
-               device_id_arg]
-        cam_code = subprocess.call(cmd, cwd=topdir)
-        assert cam_code == 0
-        with open(camera_ids_path, "r") as f:
-            for line in f:
-                camera_ids.append(line.replace('\n', ''))
+        with its.device.ItsSession() as cam:
+            camera_ids = cam.get_camera_ids()
 
     print "Running ITS on camera: %s, scene %s" % (camera_ids, scenes)
 
@@ -245,6 +282,7 @@
             assert wake_code == 0
 
     for camera_id in camera_ids:
+        camera_fov = calc_camera_fov(camera_id)
         # Loop capturing images until user confirm test scene is correct
         camera_id_arg = "camera=" + camera_id
         print "Preparing to run ITS on camera", camera_id
@@ -254,6 +292,8 @@
             os.mkdir(os.path.join(topdir, camera_id, d))
 
         for scene in scenes:
+            # Record scene start time in milliseconds for the CtsVerifier report.
+            scene_start_time = int(round(time.time() * 1000))
             skip_code = None
             tests = [(s[:-3], os.path.join("tests", scene, s))
                      for s in os.listdir(os.path.join("tests", scene))
@@ -270,7 +310,7 @@
                 out_path = os.path.join(topdir, camera_id, scene+".jpg")
                 out_arg = "out=" + out_path
                 if scene == 'sensor_fusion':
-                    skip_code = skip_sensor_fusion()
+                    skip_code = skip_sensor_fusion(camera_id)
                     if rot_rig_id or skip_code == SKIP_RET_CODE:
                         validate_switch = False
                 if skip_scene_validation:
@@ -280,9 +320,10 @@
                     if (not merge_result_switch or
                             (merge_result_switch and camera_ids[0] == '0')):
                         scene_arg = 'scene=' + scene
+                        fov_arg = 'fov=' + camera_fov
                         cmd = ['python',
                                os.path.join(os.getcwd(), 'tools/load_scene.py'),
-                               scene_arg, screen_id_arg]
+                               scene_arg, chart_dist_arg, fov_arg, screen_id_arg]
                     else:
                         time.sleep(CHART_DELAY)
                 else:
@@ -299,6 +340,20 @@
                     valid_scene_code = subprocess.call(cmd, cwd=topdir)
                     assert valid_scene_code == 0
             print "Start running ITS on camera %s, %s" % (camera_id, scene)
+            # Extract chart from scene for scene3 once up front
+            chart_loc_arg = ''
+            chart_height = CHART_HEIGHT
+            if scene == 'scene3':
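+                # Use the 2/3-scale chart for narrow FoV cameras at ~22cm.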
+                if float(camera_fov) < 90 and np.isclose(chart_distance, 22,
+                                                         rtol=0.1):
+                    chart_height *= 0.67
+                chart = its.cv2image.Chart(SCENE3_FILE, chart_height,
+                                           chart_distance, CHART_SCALE_START,
+                                           CHART_SCALE_STOP, CHART_SCALE_STEP,
+                                           camera_id)
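+                # Pass chart location (normalized) and scale to each test.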
+                chart_loc_arg = 'chart_loc=%.2f,%.2f,%.2f,%.2f,%.3f' % (
+                        chart.xnorm, chart.ynorm, chart.wnorm, chart.hnorm,
+                        chart.scale)
             # Run each test, capturing stdout and stderr.
             for (testname, testpath) in tests:
                 if auto_scene_switch:
@@ -330,7 +385,8 @@
                             test_code = skip_code
                     if skip_code is not SKIP_RET_CODE:
                         cmd = ['python', os.path.join(os.getcwd(), testpath)]
-                        cmd += sys.argv[1:] + [camera_id_arg]
+                        cmd += sys.argv[1:] + [camera_id_arg] + [chart_loc_arg]
+                        cmd += [chart_dist_arg]
                         with open(outpath, 'w') as fout, open(errpath, 'w') as ferr:
                             test_code = subprocess.call(
                                 cmd, stderr=ferr, stdout=fout, cwd=outdir)
@@ -364,10 +420,14 @@
 
                 msg = "%s %s/%s [%.1fs]" % (retstr, scene, testname, t1-t0)
                 print msg
+                its.device.adb_log(device_id, msg)
                 msg_short = "%s %s [%.1fs]" % (retstr, testname, t1-t0)
                 if test_failed:
                     summary += msg_short + "\n"
 
+            # Record scene end time in milliseconds for the CtsVerifier report.
+            scene_end_time = int(round(time.time() * 1000))
+
             if numskip > 0:
                 skipstr = ", %d test%s skipped" % (
                     numskip, "s" if numskip > 1 else "")
@@ -393,8 +453,12 @@
             results[scene][result_key] = (ItsSession.RESULT_PASS if passed
                                           else ItsSession.RESULT_FAIL)
             results[scene][ItsSession.SUMMARY_KEY] = summary_path
+            results[scene][ItsSession.START_TIME_KEY] = scene_start_time
+            results[scene][ItsSession.END_TIME_KEY] = scene_end_time
 
-        print "Reporting ITS result to CtsVerifier"
+        msg = "Reporting ITS result to CtsVerifier"
+        print msg
+        its.device.adb_log(device_id, msg)
         if merge_result_switch:
             # results are modified by report_result
             results_backup = copy.deepcopy(results)
diff --git a/apps/CameraITS/tools/run_parallel_tests.py b/apps/CameraITS/tools/run_parallel_tests.py
index 902eaba..cdba01e 100644
--- a/apps/CameraITS/tools/run_parallel_tests.py
+++ b/apps/CameraITS/tools/run_parallel_tests.py
@@ -63,7 +63,7 @@
     if scenes is None:
         scenes = auto_scenes
 
-    print ">>> Start the at %s" % time.strftime('%Y/%m/%d %H:%M:%S')
+    print ">>> Start the test at %s" % time.strftime('%Y/%m/%d %H:%M:%S')
     for scene in scenes:
         cmds = []
         cmds.append(build_cmd(device0_id, chart_host_id, device1_id, 0, scene))
diff --git a/apps/CameraITS/tools/run_sensor_fusion_box.py b/apps/CameraITS/tools/run_sensor_fusion_box.py
index ec16b3d..3c9199a 100644
--- a/apps/CameraITS/tools/run_sensor_fusion_box.py
+++ b/apps/CameraITS/tools/run_sensor_fusion_box.py
@@ -82,10 +82,6 @@
         elif s[:8] == 'tmp_dir=' and len(s) > 8:
             tmp_dir = s[8:]
 
-    if camera_id not in ['0', '1']:
-        print 'Need to specify camera 0 or 1'
-        sys.exit()
-
     # Make output directories to hold the generated files.
     tmpdir = tempfile.mkdtemp(dir=tmp_dir)
     print 'Saving output files to:', tmpdir, '\n'
@@ -94,6 +90,12 @@
     device_id_arg = 'device=' + device_id
     print 'Testing device ' + device_id
 
+    # ensure camera_id is valid
+    avail_camera_ids = find_avail_camera_ids(device_id_arg, tmpdir)
+    if camera_id not in avail_camera_ids:
+        print 'Need to specify a valid camera_id in', avail_camera_ids
+        sys.exit()
+
     camera_id_arg = 'camera=' + camera_id
     if rotator_ids:
         rotator_id_arg = 'rotator=' + rotator_ids
@@ -104,6 +106,7 @@
 
     fps_arg = 'fps=' + fps
     test_length_arg = 'test_length=' + test_length
+    print 'Capturing at %sfps' % fps
 
     os.mkdir(os.path.join(tmpdir, camera_id))
 
@@ -217,6 +220,19 @@
                 return line
     return None
 
+def find_avail_camera_ids(device_id_arg, tmpdir):
+    """Find the available camera IDs.
+
+    Args:
+        device_id_arg(str):     device=###
+        tmpdir(str):            generated tmp dir for run
+    Returns:
+        list of available cameras
+    """
+    with its.device.ItsSession() as cam:
+        avail_camera_ids = cam.get_camera_ids()
+    return avail_camera_ids
+
 
 if __name__ == '__main__':
     main()
diff --git a/apps/CtsVerifier/Android.mk b/apps/CtsVerifier/Android.mk
index ba9155c..3208ee5 100644
--- a/apps/CtsVerifier/Android.mk
+++ b/apps/CtsVerifier/Android.mk
@@ -25,33 +25,45 @@
 
 LOCAL_SRC_FILES := $(call all-java-files-under, src) $(call all-Iaidl-files-under, src)
 
+LOCAL_AIDL_INCLUDES := \
+    frameworks/native/aidl/gui
+
+LOCAL_USE_AAPT2 := true
+
 LOCAL_STATIC_JAVA_LIBRARIES := android-ex-camera2 \
                                compatibility-common-util-devicesidelib \
                                cts-sensors-tests \
                                cts-location-tests \
-                               ctstestrunner \
+                               ctstestrunner-axt \
                                apache-commons-math \
                                androidplot \
                                ctsverifier-opencv \
                                core-tests-support \
-                               android-support-v4  \
+                               androidx.legacy_legacy-support-v4  \
                                mockito-target-minus-junit4 \
                                mockwebserver \
-                               compatibility-device-util \
-                               platform-test-annotations
+                               compatibility-device-util-axt \
+                               platform-test-annotations \
+                               cts-security-test-support-library
 
-LOCAL_JAVA_LIBRARIES := legacy-android-test
+LOCAL_STATIC_ANDROID_LIBRARIES := \
+    androidx.legacy_legacy-support-v4
+
+LOCAL_JAVA_LIBRARIES += telephony-common
+LOCAL_JAVA_LIBRARIES += android.test.runner.stubs
+LOCAL_JAVA_LIBRARIES += android.test.base.stubs
+LOCAL_JAVA_LIBRARIES += android.test.mock.stubs
+LOCAL_JAVA_LIBRARIES += bouncycastle
+LOCAL_JAVA_LIBRARIES += voip-common
 
 LOCAL_PACKAGE_NAME := CtsVerifier
+LOCAL_PRIVATE_PLATFORM_APIS := true
 
 LOCAL_JNI_SHARED_LIBRARIES := libctsverifier_jni \
 		libaudioloopback_jni \
-		libnativehelper_compat_libc++
 
 LOCAL_PROGUARD_FLAG_FILES := proguard.flags
 
-LOCAL_SDK_VERSION := test_current
-
 LOCAL_DEX_PREOPT := false
 -include cts/error_prone_rules_tests.mk
 include $(BUILD_PACKAGE)
@@ -75,9 +87,9 @@
     $(call java-files-in, src/com/android/cts/verifier) \
     $(call all-Iaidl-files-under, src)
 
-LOCAL_STATIC_JAVA_LIBRARIES := android-support-v4 \
+LOCAL_STATIC_JAVA_LIBRARIES := androidx.legacy_legacy-support-v4 \
                                compatibility-common-util-devicesidelib \
-                               compatibility-device-util \
+                               compatibility-device-util-axt \
 
 include $(BUILD_STATIC_JAVA_LIBRARY)
 
@@ -91,6 +103,7 @@
 
 pre-installed-apps := \
     CtsEmptyDeviceAdmin \
+    CtsEmptyDeviceOwner \
     CtsPermissionApp \
     NotificationBot
 
@@ -113,7 +126,7 @@
 cts-verifier: CtsVerifier adb $(pre-installed-apps)
 	adb install -r $(PRODUCT_OUT)/data/app/CtsVerifier/CtsVerifier.apk \
 		$(foreach app,$(pre-installed-apps), \
-		    && adb install -r $(call apk-location-for,$(app))) \
+		    && adb install -r -t $(call apk-location-for,$(app))) \
 		&& adb shell "am start -n com.android.cts.verifier/.CtsVerifierActivity"
 
 #
@@ -155,8 +168,6 @@
 		$(hide) $(ACP) -fpr $(HOST_OUT)/CameraITS $(verifier-dir)
 		$(hide) cd $(cts-dir) && zip -rq $(verifier-dir-name) $(verifier-dir-name)
 
-ifneq ($(filter cts, $(MAKECMDGOALS)),)
-  $(call dist-for-goals, cts, $(verifier-zip):$(verifier-zip-name))
-endif
+$(call dist-for-goals, cts, $(verifier-zip):$(verifier-zip-name))
 
 include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/apps/CtsVerifier/AndroidManifest.xml b/apps/CtsVerifier/AndroidManifest.xml
index 877d83b..e95fd1e 100644
--- a/apps/CtsVerifier/AndroidManifest.xml
+++ b/apps/CtsVerifier/AndroidManifest.xml
@@ -18,9 +18,9 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
       package="com.android.cts.verifier"
       android:versionCode="5"
-      android:versionName="8.1_r1">
+      android:versionName="9.0_r1">
 
-    <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="27"/>
+    <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="28"/>
 
     <uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
     <uses-permission android:name="android.permission.ACCESS_LOCATION_EXTRA_COMMANDS"/>
@@ -32,6 +32,7 @@
     <uses-permission android:name="android.permission.CAMERA" />
     <uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
     <uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
+    <uses-permission android:name="android.permission.FOREGROUND_SERVICE"/>
     <uses-permission android:name="android.permission.FULLSCREEN" />
     <uses-permission android:name="android.permission.INTERNET" />
     <uses-permission android:name="android.permission.NFC" />
@@ -46,7 +47,6 @@
                   android:required="false" />
     <uses-feature android:name="android.hardware.camera.autofocus"
                   android:required="false" />
-    <uses-feature android:name="android.software.vr.mode" android:required="false" />
     <uses-feature android:name="android.hardware.vr.high_performance" android:required="false"/>
     <uses-feature android:name="android.software.companion_device_setup" />
     <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
@@ -62,11 +62,18 @@
     <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
     <uses-permission android:name="com.android.providers.tv.permission.WRITE_EPG_DATA" />
     <uses-permission android:name="android.permission.USE_FINGERPRINT"/>
+    <uses-permission android:name="android.permission.USE_BIOMETRIC"/>
     <uses-permission android:name="android.permission.ACCESS_NOTIFICATION_POLICY" />
     <uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
     <uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
     <uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
 
+    <uses-permission android:name="android.permission.READ_PHONE_STATE"/>
+    <uses-permission android:name="android.permission.READ_SMS"/>
+    <uses-permission android:name="android.permission.READ_PHONE_NUMBERS"/>
+    <uses-permission android:name="android.permission.RECEIVE_SMS" />
+    <uses-permission android:name="android.permission.SEND_SMS" />
+
     <!-- Needed by UsbTest tapjacking -->
     <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
 
@@ -76,12 +83,18 @@
     <!-- Needed for Telecom self-managed ConnectionService tests. -->
     <uses-permission android:name="android.permission.MANAGE_OWN_CALLS" />
 
-    <application android:label="@string/app_name"
+    <application android:networkSecurityConfig="@xml/network_security_config"
+            android:label="@string/app_name"
             android:icon="@drawable/icon"
             android:debuggable="true"
             android:largeHeap="true"
             android:theme="@android:style/Theme.DeviceDefault">
 
+        <provider android:name="android.location.cts.MmsPduProvider"
+                android:authorities="emergencycallverifier"
+                android:grantUriPermissions="true" />
+        <uses-library android:name="android.test.runner" />
+
         <meta-data android:name="SuiteName" android:value="CTS_VERIFIER" />
 
         <meta-data android:name="android.telephony.HIDE_VOICEMAIL_SETTINGS_MENU"
@@ -169,8 +182,13 @@
                        android:value="android.software.companion_device_setup" />
         </activity>
 
-        <!-- A generic activity for intent based tests -->
-        <activity android:name=".IntentDrivenTestActivity"/>
+        <!-- A generic activity for intent-based tests.
+        stateNotNeeded is set to prevent IntentDrivenTestActivity from being killed when
+        switching users. IntentDrivenTestActivity does not implement onSaveInstanceState(), so it
+        is fine that onSaveInstanceState() is never called.
+        -->
+        <activity android:name=".IntentDrivenTestActivity"
+                android:stateNotNeeded="true"/>
 
         <activity android:name=".admin.DeviceAdminKeyguardDisabledFeaturesActivity"
                 android:label="@string/da_kg_disabled_features_test"
@@ -259,6 +277,56 @@
         </activity>
 
         <!--
+             CTS Verifier Bluetooth Hid Device Test Screen
+                 test category : bt_device_communication
+                 test parent : BluetoothTestActivity
+        -->
+        <activity
+            android:name=".bluetooth.HidDeviceActivity"
+            android:configChanges="keyboardHidden|orientation|screenSize"
+            android:label="@string/bt_hid_device" >
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+
+            <meta-data
+                android:name="test_category"
+                android:value="@string/bt_hid" />
+            <meta-data
+                android:name="test_parent"
+                android:value="com.android.cts.verifier.bluetooth.BluetoothTestActivity" />
+        </activity>
+
+        <!-- Support service to ensure the HID Device Test succeeds on
+            devices with full-screen pairing dialogs (e.g. Android TV). -->
+        <service android:name=".bluetooth.FocusLossPreventionService" />
+
+        <!--
+             CTS Verifier Bluetooth Hid Host Test Screen
+                 test category : bt_device_communication
+                 test parent : BluetoothTestActivity
+        -->
+        <activity
+            android:name=".bluetooth.HidHostActivity"
+            android:configChanges="keyboardHidden|orientation|screenSize"
+            android:label="@string/bt_hid_host" >
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+
+            <meta-data
+                android:name="test_category"
+                android:value="@string/bt_hid" />
+            <meta-data
+                android:name="test_parent"
+                android:value="com.android.cts.verifier.bluetooth.BluetoothTestActivity" />
+        </activity>
+
+        <!--
              CTS Verifier Bluetooth Secure Server Test Screen
                  test category : bt_device_communication
                  test parent : BluetoothTestActivity
@@ -515,13 +583,17 @@
             <meta-data
                 android:name="test_required_features"
                 android:value="android.hardware.bluetooth_le" />
+            <meta-data
+                android:name="test_excluded_features"
+                android:value="android.hardware.type.watch"  />
         </activity>
 
         <!--
            CTS Verifier BLE Insecure Client Encrypted Test Screen
                test category : bt_le
                test parent : BleInsecureClientTestListActivity
-        -->
+	-->
+        <!-- TODO(b/78538657)
         <activity
             android:name=".bluetooth.BleInsecureEncryptedClientTestActivity"
             android:configChanges="keyboardHidden|orientation|screenSize"
@@ -542,7 +614,8 @@
             <meta-data
                 android:name="test_required_features"
                 android:value="android.hardware.bluetooth_le" />
-        </activity>
+	</activity>
+        -->
 
         <!--
               =================================================================================
@@ -625,6 +698,9 @@
             <meta-data
                 android:name="test_required_features"
                 android:value="android.hardware.bluetooth_le" />
+            <meta-data
+                android:name="test_excluded_features"
+                android:value="android.hardware.type.watch"  />
         </activity>
 
         <!--
@@ -632,6 +708,7 @@
                test category : bt_le
                test parent : BleInsecureServerTestListActivity
         -->
+        <!-- TODO(b/78538657)
         <activity
             android:name=".bluetooth.BleInsecureEncryptedServerTestActivity"
             android:configChanges="keyboardHidden|orientation|screenSize"
@@ -653,6 +730,7 @@
                 android:name="test_required_features"
                 android:value="android.hardware.bluetooth_le" />
         </activity>
+        -->
 
         <!--
              =================================================================================
@@ -736,6 +814,9 @@
             <meta-data
                 android:name="test_required_features"
                 android:value="android.hardware.bluetooth_le" />
+            <meta-data
+                android:name="test_excluded_features"
+                android:value="android.hardware.type.watch"  />
         </activity>
 
         <!--
@@ -846,6 +927,9 @@
             <meta-data
                 android:name="test_required_features"
                 android:value="android.hardware.bluetooth_le" />
+            <meta-data
+                android:name="test_excluded_features"
+                android:value="android.hardware.type.watch"  />
         </activity>
 
         <!--
@@ -996,6 +1080,20 @@
                        android:value="android.hardware.type.television:android.software.leanback:android.hardware.type.watch" />
             <meta-data android:name="test_required_features" android:value="android.hardware.fingerprint" />
         </activity>
+
+        <activity android:name=".security.BiometricPromptBoundKeysTest"
+            android:label="@string/sec_fingerprint_dialog_bound_key_test"
+            android:configChanges="keyboardHidden|orientation|screenSize" >
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_security" />
+            <meta-data android:name="test_excluded_features"
+                android:value="android.hardware.type.television:android.software.leanback:android.hardware.type.watch" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.fingerprint" />
+        </activity>
+
         <activity android:name=".security.ScreenLockBoundKeysTest"
                 android:label="@string/sec_lock_bound_key_test"
                 android:configChanges="keyboardHidden|orientation|screenSize" >
@@ -1009,6 +1107,7 @@
             <meta-data android:name="test_required_features"
                     android:value="android.software.device_admin" />
         </activity>
+
         <activity android:name=".security.LockConfirmBypassTest"
                 android:label="@string/lock_confirm_test_title"
                 android:configChanges="keyboardHidden|orientation|screenSize" >
@@ -1061,94 +1160,6 @@
             <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
         </activity>
 
-        <activity android:name=".location.GnssMeasurementsConstellationTestsActivity"
-            android:label="@string/location_gnss_constellation_type_test"
-            android:screenOrientation="locked">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN"/>
-                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_hardware"/>
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
-        </activity>
-
-        <activity android:name=".location.GnssMeasurementRegistrationTestsActivity"
-            android:label="@string/location_gnss_reg_test"
-            android:screenOrientation="locked">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN"/>
-                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_hardware"/>
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
-        </activity>
-
-        <activity android:name=".location.GnssMeasurementValuesTestsActivity"
-            android:label="@string/location_gnss_value_test"
-            android:screenOrientation="locked">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN"/>
-                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_hardware"/>
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
-        </activity>
-
-        <activity android:name=".location.GnssPseudorangeVerificationTestsActivity"
-            android:label="@string/location_pseudorange_value_test"
-            android:screenOrientation="locked">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN"/>
-                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_hardware"/>
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
-        </activity>
-
-        <activity android:name=".location.GnssTtffTestsActivity"
-            android:label="@string/location_gnss_ttff_test"
-            android:screenOrientation="locked">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN"/>
-                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_hardware"/>
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
-        </activity>
-
-        <activity android:name=".location.GnssMeasurementWhenNoLocationTestsActivity"
-            android:label="@string/location_gnss_measure_no_location_test"
-            android:screenOrientation="locked">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN"/>
-                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_hardware"/>
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
-        </activity>
-
-        <activity android:name=".location.GnssNavigationMessageTestsActivity"
-            android:label="@string/location_gnss_nav_msg_test"
-            android:screenOrientation="locked">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN"/>
-                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_hardware"/>
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
-        </activity>
-
-        <!--  activity android:name=".location.GnssStatusTestsActivity"
-            android:label="@string/location_gnss_status_test"
-            android:screenOrientation="locked">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN"/>
-                <category android:name="android.cts.intent.category.MANUAL_TEST"/>
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_hardware"/>
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
-        </activity -->
-
         <activity android:name=".location.LocationListenerActivity"
                 android:label="@string/location_listener_activity"
                 android:configChanges="keyboardHidden|orientation|screenSize">
@@ -1168,6 +1179,19 @@
             <meta-data android:name="test_required_features" android:value="android.hardware.wifi" />
         </activity>
 
+        <activity android:name=".net.MultiNetworkConnectivityTestActivity"
+                  android:label="@string/multinetwork_connectivity_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+            <meta-data android:name="test_category" android:value="@string/test_category_networking" />
+            <meta-data android:name="test_required_features"
+                       android:value="android.hardware.wifi:android.hardware.telephony" />
+            <meta-data android:name="test_excluded_features"
+                       android:value="android.hardware.type.television:android.software.leanback:android.hardware.type.watch" />
+        </activity>
+
         <activity android:name=".nfc.NfcTestActivity"
                 android:label="@string/nfc_test"
                 android:configChanges="keyboardHidden|orientation|screenSize">
@@ -1481,6 +1505,8 @@
             <meta-data android:name="test_category" android:value="@string/test_category_sensors"/>
             <meta-data android:name="test_required_features"
                        android:value="android.hardware.sensor.accelerometer"/>
+            <meta-data android:name="test_excluded_features"
+                       android:value="android.hardware.type.automotive"/>
         </activity>
 
         <activity android:name=".sensors.GyroscopeMeasurementTestActivity"
@@ -1493,6 +1519,8 @@
             <meta-data android:name="test_category" android:value="@string/test_category_sensors"/>
             <meta-data android:name="test_required_features"
                        android:value="android.hardware.sensor.gyroscope"/>
+            <meta-data android:name="test_excluded_features"
+                       android:value="android.hardware.type.automotive"/>
         </activity>
 
         <activity android:name=".sensors.HeartRateMonitorTestActivity"
@@ -1521,7 +1549,7 @@
 
         <activity android:name=".sensors.OffBodySensorTestActivity"
             android:label="@string/snsr_offbody_sensor_test">
-            <receiver android:name="com.android.cts.verifier.sensors.OffBodySensorTestActivity$AlarmReceiver"></receiver>
+<!--            <receiver android:name="com.android.cts.verifier.sensors.OffBodySensorTestActivity$AlarmReceiver"></receiver>-->
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
                 <category android:name="android.cts.intent.category.MANUAL_TEST" />
@@ -1605,51 +1633,6 @@
         </activity>
         <!-- End sensor tests definitions -->
 
-        <activity android:name=".location.LocationModeOffTestActivity"
-                android:label="@string/location_mode_off_test">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN" />
-                <category android:name="android.cts.intent.category.MANUAL_TEST" />
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_location" />
-            <meta-data android:name="test_excluded_features"
-                    android:value="android.hardware.type.television:android.software.leanback" />
-        </activity>
-        <activity android:name=".location.LocationModeHighAccuracyTestActivity"
-                android:label="@string/location_mode_high_accuracy_test">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN" />
-                <category android:name="android.cts.intent.category.MANUAL_TEST" />
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_location" />
-            <meta-data android:name="test_required_features"
-                    android:value="android.hardware.location.network:android.hardware.location.gps" />
-            <meta-data android:name="test_excluded_features"
-                    android:value="android.hardware.type.television:android.software.leanback" />
-        </activity>
-        <activity android:name=".location.LocationModeBatterySavingTestActivity"
-                android:label="@string/location_mode_battery_saving_test">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN" />
-                <category android:name="android.cts.intent.category.MANUAL_TEST" />
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_location" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.network" />
-            <meta-data android:name="test_excluded_features"
-                    android:value="android.hardware.type.television:android.software.leanback" />
-        </activity>
-        <activity android:name=".location.LocationModeDeviceOnlyTestActivity"
-                android:label="@string/location_mode_device_only_test">
-            <intent-filter>
-                <action android:name="android.intent.action.MAIN" />
-                <category android:name="android.cts.intent.category.MANUAL_TEST" />
-            </intent-filter>
-            <meta-data android:name="test_category" android:value="@string/test_category_location" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.location.gps" />
-            <meta-data android:name="test_excluded_features"
-                    android:value="android.hardware.type.television:android.software.leanback" />
-        </activity>
-
         <activity android:name=".camera.formats.CameraFormatsActivity"
                  android:label="@string/camera_format"
                  android:screenOrientation="landscape">
@@ -1821,6 +1804,10 @@
 
         <activity android:name=".managedprovisioning.RecentsRedactionActivity"
                 android:label="@string/provisioning_byod_recents" >
+        </activity>
+        <activity android:name=".managedprovisioning.IntermediateRecentActivity"
+                  android:label="@string/provisioning_byod_recents"
+                  android:theme="@android:style/Theme.NoDisplay">
             <intent-filter>
                 <action android:name="com.android.cts.verifier.managedprovisioning.RECENTS" />
                 <category android:name="android.intent.category.DEFAULT"></category>
@@ -1846,6 +1833,14 @@
             <meta-data android:name="test_category" android:value="@string/test_category_notifications" />
         </activity>
 
+        <receiver android:name=".notifications.BlockChangeReceiver">
+            <intent-filter>
+                <action android:name="android.app.action.NOTIFICATION_CHANNEL_BLOCK_STATE_CHANGED"/>
+                <action android:name="android.app.action.NOTIFICATION_CHANNEL_GROUP_BLOCK_STATE_CHANGED"/>
+                <action android:name="android.app.action.APP_BLOCK_STATE_CHANGED"/>
+            </intent-filter>
+        </receiver>
+
         <activity android:name=".notifications.ConditionProviderVerifierActivity"
                   android:label="@string/cp_test">
             <intent-filter>
@@ -1854,7 +1849,7 @@
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_notifications" />
             <meta-data android:name="test_excluded_features"
-                       android:value="android.hardware.type.television:android.software.leanback:android.hardware.type.watch" />
+                       android:value="android.hardware.type.automotive:android.hardware.type.television:android.software.leanback:android.hardware.type.watch" />
         </activity>
 
         <activity android:name=".notifications.AttentionManagementVerifierActivity"
@@ -1895,7 +1890,7 @@
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_notifications" />
             <meta-data android:name="test_excluded_features"
-                android:value="android.hardware.type.watch:android.software.leanback" />
+                android:value="android.hardware.type.watch:android.software.leanback:android.hardware.type.automotive" />
         </activity>
 
         <activity android:name=".vr.VrListenerVerifierActivity"
@@ -1907,7 +1902,7 @@
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_vr" />
             <meta-data android:name="test_required_features"
-                       android:value="android.software.vr.mode" />
+                       android:value="android.hardware.vr.high_performance" />
         </activity>
 
         <activity android:name=".vr.MockVrActivity"
@@ -2072,6 +2067,14 @@
                   android:label="@string/aware_data_path_oob_passphrase_initiator"
                   android:configChanges="keyboardHidden|orientation|screenSize" />
 
+        <activity android:name=".wifiaware.DiscoveryRangingPublishTestActivity"
+                  android:label="@string/aware_discovery_ranging_publish"
+                  android:configChanges="keyboardHidden|orientation|screenSize" />
+
+        <activity android:name=".wifiaware.DiscoveryRangingSubscribeTestActivity"
+                  android:label="@string/aware_discovery_ranging_subscribe"
+                  android:configChanges="keyboardHidden|orientation|screenSize" />
+
         <activity-alias android:name=".CtsVerifierActivity" android:label="@string/app_name"
                 android:targetActivity=".TestListActivity">
             <intent-filter>
@@ -2144,6 +2147,11 @@
                     android:value="android.hardware.type.television:android.software.leanback:android.hardware.type.automotive" />
         </activity>
 
+        <service
+                android:name="com.android.cts.verifier.sensors.DeviceSuspendTestActivity$DeviceSuspendTestService"
+                android:label="@string/snsr_device_suspend_service"
+                android:icon="@drawable/icon" />
+
         <receiver android:name="com.android.cts.verifier.sensors.DeviceSuspendTestActivity$AlarmReceiver">
         </receiver>
 
@@ -2170,6 +2178,28 @@
                        android:value="android.hardware.sensor.accelerometer" />
         </activity>
 
+        <activity
+            android:name="com.android.cts.verifier.sensors.EventSanitizationTestActivity"
+            android:label="@string/snsr_event_sanitization_test"
+            android:screenOrientation="nosensor" >
+
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.cts.intent.category.MANUAL_TEST" />
+            </intent-filter>
+
+            <meta-data
+                android:name="test_category"
+                android:value="@string/test_category_sensors">
+            </meta-data>
+
+            <meta-data
+                android:name="test_required_features"
+                android:value="android.hardware.sensor.proximity:android.hardware.sensor.accelerometer">
+            </meta-data>
+
+        </activity>
+
         <receiver android:name=".widget.WidgetCtsProvider">
             <intent-filter>
                 <action android:name="android.appwidget.action.APPWIDGET_UPDATE" />
@@ -2289,6 +2319,15 @@
             <meta-data android:name="test_required_features" android:value="android.software.device_admin" />
         </activity>
 
+        <activity android:name=".managedprovisioning.ManagedUserPositiveTestActivity"
+                  android:label="@string/managed_user_test">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <action android:name="com.android.cts.verifier.managedprovisioning.action.CHECK_AFFILIATED_PROFILE_OWNER" />
+                <category android:name="android.intent.category.DEFAULT" />
+            </intent-filter>
+        </activity>
+
         <activity android:name=".managedprovisioning.DeviceOwnerRequestingBugreportTestActivity"
                 android:label="@string/device_owner_requesting_bugreport_tests">
             <intent-filter>
@@ -2301,14 +2340,6 @@
             <meta-data android:name="test_required_features" android:value="android.software.device_admin" />
         </activity>
 
-        <activity android:name=".managedprovisioning.DeviceOwnerPositiveTestActivity$CommandReceiver"
-                android:exported="false"
-                android:theme="@android:style/Theme.NoDisplay"
-                android:noHistory="true"
-                android:autoRemoveFromRecents="true"
-                android:stateNotNeeded="true">
-        </activity>
-
         <activity android:name=".managedprovisioning.KeyguardDisabledFeaturesActivity"
                 android:label="@string/provisioning_byod_keyguard_disabled_features">
         </activity>
@@ -2317,6 +2348,14 @@
                 android:label="@string/provisioning_byod_disallow_apps_control">
         </activity>
 
+        <activity android:name=".managedprovisioning.LockTaskUiTestActivity"
+                android:label="@string/device_owner_lock_task_ui_test">
+            <intent-filter>
+                <action android:name="com.android.cts.verifier.managedprovisioning.action.STOP_LOCK_TASK" />
+                <category android:name="android.intent.category.DEFAULT" />
+            </intent-filter>
+        </activity>
+
         <activity android:name=".managedprovisioning.WifiLockdownTestActivity"
                 android:label="@string/device_owner_wifi_lockdown_test">
         </activity>
@@ -2344,6 +2383,14 @@
             </intent-filter>
         </activity>
 
+        <activity android:name=".managedprovisioning.KeyChainTestActivity"
+                android:label="@string/provisioning_byod_keychain">
+            <intent-filter>
+                <action android:name="com.android.cts.verifier.managedprovisioning.KEYCHAIN" />
+                <category android:name="android.intent.category.DEFAULT" />
+            </intent-filter>
+        </activity>
+
         <activity android:name=".managedprovisioning.PermissionLockdownTestActivity"
                 android:label="@string/device_profile_owner_permission_lockdown_test">
             <intent-filter>
@@ -2480,11 +2527,15 @@
             </intent-filter>
             <intent-filter>
                 <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_STATUS" />
-                <category android:name="android.intent.category.DEFAULT"></category>
+                <category android:name="android.intent.category.DEFAULT" />
+            </intent-filter>
+            <intent-filter>
+            <action android:name="com.android.cts.verifier.managedprovisioning.BYOD_TEST_RESULT" />
+                <category android:name="android.intent.category.DEFAULT" />
             </intent-filter>
             <intent-filter>
                 <action android:name="com.android.cts.verifier.managedprovisioning.action.BYOD_DISK_ENCRYPTION_STATUS" />
-                <category android:name="android.intent.category.DEFAULT"></category>
+                <category android:name="android.intent.category.DEFAULT" />
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_managed_provisioning" />
             <meta-data android:name="test_required_features" android:value="android.software.managed_users:android.software.device_admin" />
@@ -2542,7 +2593,7 @@
         </activity>
 
         <provider
-            android:name="android.support.v4.content.FileProvider"
+            android:name="androidx.core.content.FileProvider"
             android:authorities="com.android.cts.verifier.managedprovisioning.fileprovider"
             android:grantUriPermissions="true"
             android:exported="false">
@@ -2659,7 +2710,15 @@
         </activity>
 
         <activity android:name=".managedprovisioning.TurnOffWorkActivity"
-                android:label="@string/provisioning_byod_turn_off_work">
+                  android:label="@string/provisioning_byod_turn_off_work">
+        </activity>
+
+        <activity android:name=".managedprovisioning.WorkProfileWidgetActivity"
+                  android:label="@string/provisioning_byod_work_profile_widget">
+        <intent-filter>
+                <action android:name="com.android.cts.verifier.byod.test_work_profile_widget"/>
+                <category android:name="android.intent.category.DEFAULT"/>
+            </intent-filter>
         </activity>
 
         <receiver android:name=".managedprovisioning.DeviceAdminTestReceiver"
@@ -2672,6 +2731,10 @@
                 <action android:name="android.app.action.PROFILE_PROVISIONING_COMPLETE"/>
             </intent-filter>
         </receiver>
+        <service android:name=".managedprovisioning.DeviceAdminTestReceiver$PrimaryUserService"
+                 android:exported="true"
+                 android:permission="android.permission.BIND_DEVICE_ADMIN">
+        </service>
 
 <!-- Comment out until b/28406044 is addressed
         <activity android:name=".jobscheduler.IdleConstraintTestActivity" android:label="@string/js_idle_test">
@@ -2836,6 +2899,7 @@
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_audio" />
             <meta-data android:name="test_required_features" android:value="android.hardware.audio.output" />
+            <meta-data android:name="test_excluded_features" android:value="android.software.leanback" />
         </activity>
 
         <activity android:name=".audio.AudioInputDeviceNotificationsActivity"
@@ -2846,6 +2910,7 @@
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_audio" />
             <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
+            <meta-data android:name="test_excluded_features" android:value="android.software.leanback" />
         </activity>
 
         <activity android:name=".audio.AudioOutputRoutingNotificationsActivity"
@@ -2856,7 +2921,8 @@
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_audio" />
             <meta-data android:name="test_required_features" android:value="android.hardware.audio.output" />
-            </activity>
+            <meta-data android:name="test_excluded_features" android:value="android.software.leanback" />
+        </activity>
 
         <activity android:name=".audio.AudioInputRoutingNotificationsActivity"
                   android:label="@string/audio_input_routingnotifications_test">
@@ -2866,7 +2932,8 @@
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_audio" />
             <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
-            </activity>
+            <meta-data android:name="test_excluded_features" android:value="android.software.leanback" />
+        </activity>
 
         <activity android:name=".audio.USBAudioPeripheralAttributesActivity"
                   android:label="@string/audio_uap_attribs_test">
@@ -2923,8 +2990,7 @@
                 <category android:name="android.cts.intent.category.MANUAL_TEST" />
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_audio" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.audio.output" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone:android.hardware.audio.output" />
             <meta-data android:name="test_excluded_features"
                        android:value="android.hardware.type.watch:android.hardware.type.television" />
         </activity>
@@ -2936,8 +3002,7 @@
                 <category android:name="android.cts.intent.category.MANUAL_TEST" />
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_audio" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.audio.output" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone:android.hardware.audio.output" />
         </activity>
 
         <activity android:name=".audio.AudioFrequencySpeakerActivity"
@@ -2947,8 +3012,7 @@
                 <category android:name="android.cts.intent.category.MANUAL_TEST" />
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_audio" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.audio.output" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.usb.host" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.audio.output:android.hardware.usb.host" />
         </activity>
 
         <activity android:name=".audio.AudioFrequencyMicActivity"
@@ -2958,9 +3022,7 @@
                 <category android:name="android.cts.intent.category.MANUAL_TEST" />
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_audio" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.microphone" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.audio.output" />
-            <meta-data android:name="test_required_features" android:value="android.hardware.usb.host" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.microphone:android.hardware.audio.output:android.hardware.usb.host" />
         </activity>
 
         <activity android:name=".audio.AudioFrequencyUnprocessedActivity"
@@ -3313,6 +3375,12 @@
                 <action android:name="android.telecom.ConnectionService" />
             </intent-filter>
         </service>
+        <service android:name="com.android.cts.verifier.telecom.CtsSelfManagedConnectionService"
+            android:permission="android.permission.BIND_TELECOM_CONNECTION_SERVICE" >
+            <intent-filter>
+                <action android:name="android.telecom.ConnectionService" />
+            </intent-filter>
+        </service>
     </application>
 
 </manifest>
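
Note: the manifest hunks above fold multiple test_required_features entries into one colon-separated value and add test_excluded_features for leanback devices. As a quick, non-authoritative check of which of those features a connected device actually declares (standard adb/pm usage; output format assumed), something like the following can be run:

# List the system features the attached device declares, one per line,
# for comparison against the colon-separated feature lists above.
adb shell pm list features | sed 's/^feature://' | sort
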
diff --git a/apps/CtsVerifier/create_test_certs.sh b/apps/CtsVerifier/create_test_certs.sh
index b59974a..93fa377 100755
--- a/apps/CtsVerifier/create_test_certs.sh
+++ b/apps/CtsVerifier/create_test_certs.sh
@@ -20,11 +20,15 @@
 '/O=Android'\
 '/CN=localhost'
 PASSWORD='androidtest'
+SAN=\
+'DNS:localhost'
 
 echo "Creating directory '$CA_DIR'..."
 mkdir -p "$tmpdir"/"$CA_DIR"/newcerts \
     && echo '01' > "$tmpdir"/"$CA_DIR"/serial \
     && touch "$tmpdir"/"$CA_DIR"/index.txt
+cat /etc/ssl/openssl.cnf <(printf "\n[SAN]\nsubjectAltName=$SAN") \
+    > "$tmpdir"/openssl.conf
 
 echo "Generating CA certificate..."
 (cd "$tmpdir" \
@@ -52,6 +56,8 @@
         -days 3650 \
         -out 'userkey.req' \
         -subj "$SUBJECT" \
+        -extensions SAN \
+        -config openssl.conf \
     && openssl pkcs8 \
         -topk8 \
         -outform DER \
@@ -68,6 +74,8 @@
         -keyfile 'cakey.pem' \
         -days 3650 \
         -passin 'pass:'"$PASSWORD" \
+        -extensions SAN \
+        -config openssl.conf \
         -batch \
     && openssl x509 \
         -outform DER \
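
Note: the create_test_certs.sh changes above append a [SAN] section with subjectAltName=DNS:localhost to a copy of the OpenSSL config and feed it to the request and signing steps via "-extensions SAN -config openssl.conf". A minimal sketch for sanity-checking the result is below; the file name usercert.pem is assumed, so adjust it to whatever the script actually emits.

# Print the extensions of the freshly signed certificate and confirm the
# Subject Alternative Name carries DNS:localhost.
openssl x509 -in usercert.pem -noout -text | grep -A1 'Subject Alternative Name'
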
diff --git a/apps/CtsVerifier/jni/verifier/Android.mk b/apps/CtsVerifier/jni/verifier/Android.mk
index 42e2d26..9ee7eee 100644
--- a/apps/CtsVerifier/jni/verifier/Android.mk
+++ b/apps/CtsVerifier/jni/verifier/Android.mk
@@ -27,12 +27,15 @@
 
 LOCAL_C_INCLUDES := $(JNI_H_INCLUDE)
 
-LOCAL_CXX_STL := libc++_static
+LOCAL_SDK_VERSION := current
+LOCAL_NDK_STL_VARIANT := system
 
 LOCAL_SHARED_LIBRARIES := liblog \
-		libnativehelper_compat_libc++
 
-LOCAL_CFLAGS := -Wno-unused-parameter
+LOCAL_CFLAGS := \
+        -Wall -Werror \
+        -Wno-unused-parameter \
+        -Wno-unused-variable \
 
 include $(BUILD_SHARED_LIBRARY)
 
diff --git a/apps/CtsVerifier/jni/verifier/com_android_cts_verifier_camera_StatsImage.cpp b/apps/CtsVerifier/jni/verifier/com_android_cts_verifier_camera_StatsImage.cpp
index b7c96e2..6c948ea 100644
--- a/apps/CtsVerifier/jni/verifier/com_android_cts_verifier_camera_StatsImage.cpp
+++ b/apps/CtsVerifier/jni/verifier/com_android_cts_verifier_camera_StatsImage.cpp
@@ -17,7 +17,6 @@
 #define LOG_TAG "ITS-StatsImage-JNI"
 // #define LOG_NDEBUG 0
 #include <android/log.h>
-#include <utils/Log.h>
 
 #include <jni.h>
 #include <stdio.h>
diff --git a/apps/CtsVerifier/proguard.flags b/apps/CtsVerifier/proguard.flags
index e4249c4..2be1211 100644
--- a/apps/CtsVerifier/proguard.flags
+++ b/apps/CtsVerifier/proguard.flags
@@ -20,6 +20,9 @@
 
 -keepclasseswithmembers class * extends com.android.cts.verifier.location.LocationModeTestActivity
 
+-keepclasseswithmembers class * extends com.android.cts.verifier.audio.HifiUltrasoundSpeakerTestActivity
+-keepclasseswithmembers class * extends com.android.cts.verifier.audio.HifiUltrasoundTestActivity
+
 # keep mockito methods
 -keep class org.mockito.** { *; }
 -keep interface org.mockito.** { *; }
@@ -38,6 +41,6 @@
 -dontwarn com.android.org.bouncycastle.**
 -dontwarn com.android.okhttp.**
 -dontwarn org.opencv.**
--dontwarn android.support.test.internal.runner.hidden.ExposedInstrumentationApi
+-dontwarn androidx.test.internal.runner.hidden.ExposedInstrumentationApi
 
 -dontwarn java.lang.management.**
diff --git a/apps/CtsVerifier/res/layout-land/gnss_emergency_test.xml b/apps/CtsVerifier/res/layout-land/gnss_emergency_test.xml
new file mode 100644
index 0000000..bf3f29d
--- /dev/null
+++ b/apps/CtsVerifier/res/layout-land/gnss_emergency_test.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<com.android.cts.verifier.BoxInsetLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+    <LinearLayout app:ctsv_layout_box="all"
+        android:orientation="vertical"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        >
+
+        <LinearLayout
+            android:orientation="horizontal"
+            android:layout_width="match_parent"
+            android:layout_height="0dp"
+            android:layout_weight="1">
+
+            <TextView android:id="@+id/text"
+                android:textSize="14sp"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"/>
+
+            <ScrollView
+                android:id="@+id/log_scroll_view"
+                android:fillViewport="true"
+                android:layout_height="match_parent"
+                android:layout_width="0dp"
+                android:layout_weight="1">
+
+                <LinearLayout
+                    android:id="@+id/log_layout"
+                    android:orientation="vertical"
+                    android:layout_width="match_parent"
+                    android:layout_height="match_parent"/>
+            </ScrollView>
+
+        </LinearLayout>
+        <Button
+            android:id="@+id/next_button"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:text="@string/next_button_text" />
+        <include layout="@layout/pass_fail_buttons" />
+    </LinearLayout>
+</com.android.cts.verifier.BoxInsetLayout>
\ No newline at end of file
diff --git a/apps/CtsVerifier/res/layout-port/gnss_emergency_test.xml b/apps/CtsVerifier/res/layout-port/gnss_emergency_test.xml
new file mode 100644
index 0000000..fb23a32
--- /dev/null
+++ b/apps/CtsVerifier/res/layout-port/gnss_emergency_test.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    style="@style/RootLayoutPadding"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:orientation="vertical">
+
+    <TextView
+        android:id="@+id/text"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:textSize="14sp" />
+
+    <ScrollView
+        android:id="@+id/log_scroll_view"
+        android:layout_width="0dp"
+        android:layout_height="match_parent"
+        android:layout_weight="1"
+        android:fillViewport="true">
+
+        <LinearLayout
+            android:id="@+id/log_layout"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:orientation="vertical" />
+
+    </ScrollView>
+
+    <Button
+        android:id="@+id/next_button"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:text="@string/next_button_text" />
+    <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout-round/provisioning_byod.xml b/apps/CtsVerifier/res/layout-round/provisioning_byod.xml
index d2b6e0e..cee2654 100644
--- a/apps/CtsVerifier/res/layout-round/provisioning_byod.xml
+++ b/apps/CtsVerifier/res/layout-round/provisioning_byod.xml
@@ -12,7 +12,7 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<android.support.v4.widget.NestedScrollView xmlns:android="http://schemas.android.com/apk/res/android"
+<androidx.core.widget.NestedScrollView xmlns:android="http://schemas.android.com/apk/res/android"
     android:id="@+id/main_layout"
     style="@style/RootLayoutPadding"
     android:layout_width="match_parent"
@@ -39,4 +39,4 @@
             android:layout_weight="3"/>
         <include layout="@layout/pass_fail_buttons"/>
     </LinearLayout>
-</android.support.v4.widget.NestedScrollView>
+</androidx.core.widget.NestedScrollView>
diff --git a/apps/CtsVerifier/res/layout-small/gnss_emergency_test.xml b/apps/CtsVerifier/res/layout-small/gnss_emergency_test.xml
new file mode 100644
index 0000000..7bd48f5
--- /dev/null
+++ b/apps/CtsVerifier/res/layout-small/gnss_emergency_test.xml
@@ -0,0 +1,51 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    style="@style/RootLayoutPadding"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+    <ScrollView
+        android:id="@+id/log_scroll_view"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:fillViewport="true">
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:orientation="vertical">
+
+            <TextView
+                android:id="@+id/text"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:textSize="14sp" />
+
+            <LinearLayout
+                android:id="@+id/log_layout"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:orientation="vertical" />
+
+            <Button
+                android:id="@+id/next_button"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/next_button_text" />
+            <include layout="@layout/pass_fail_buttons" />
+        </LinearLayout>
+
+    </ScrollView>
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout-small/positive_managed_user.xml b/apps/CtsVerifier/res/layout-small/positive_managed_user.xml
new file mode 100644
index 0000000..84fa363
--- /dev/null
+++ b/apps/CtsVerifier/res/layout-small/positive_managed_user.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    style="@style/RootLayoutPadding"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:orientation="vertical">
+
+    <ScrollView
+        android:layout_width="match_parent"
+        android:layout_height="match_parent">
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:orientation="vertical">
+
+            <TextView
+                android:id="@+id/positive_managed_user_instructions"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/managed_user_positive_tests_instructions"
+                android:textSize="16dip" />
+
+            <ListView
+                android:id="@+id/android:list"
+                android:layout_width="match_parent"
+                android:layout_height="800dip" />
+
+            <include layout="@layout/pass_fail_buttons" />
+        </LinearLayout>
+    </ScrollView>
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout-watch/provisioning_byod.xml b/apps/CtsVerifier/res/layout-watch/provisioning_byod.xml
index 75cc90a..ea1d2db 100644
--- a/apps/CtsVerifier/res/layout-watch/provisioning_byod.xml
+++ b/apps/CtsVerifier/res/layout-watch/provisioning_byod.xml
@@ -13,7 +13,7 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<android.support.v4.widget.NestedScrollView xmlns:android="http://schemas.android.com/apk/res/android"
+<androidx.core.widget.NestedScrollView xmlns:android="http://schemas.android.com/apk/res/android"
     style="@style/RootLayoutPadding"
     android:layout_width="match_parent"
     android:layout_height="match_parent">
@@ -41,4 +41,4 @@
 
         <include layout="@layout/pass_fail_buttons"/>
     </LinearLayout>
-</android.support.v4.widget.NestedScrollView>
+</androidx.core.widget.NestedScrollView>
diff --git a/apps/CtsVerifier/res/layout/bt_hid_device.xml b/apps/CtsVerifier/res/layout/bt_hid_device.xml
new file mode 100644
index 0000000..478fc0e
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/bt_hid_device.xml
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+
+<LinearLayout
+    style="@style/RootLayoutPadding"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:orientation="vertical">
+
+  <TextView
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:text="@string/bt_hid_device_register" />
+
+  <Button
+      android:id="@+id/bt_hid_device_register_button"
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:text="@string/bt_hid_device_register" />
+
+  <Button
+      android:id="@+id/bt_hid_device_discoverable_button"
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:text="@string/bt_make_discoverable" />
+
+  <Button
+      android:id="@+id/bt_hid_device_send_report_button"
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:text="@string/bt_hid_device_send_report" />
+
+  <Button
+      android:id="@+id/bt_hid_device_reply_report_button"
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:text="@string/bt_hid_device_reply_report" />
+
+  <Button
+      android:id="@+id/bt_hid_device_report_error_button"
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:text="@string/bt_hid_device_report_error" />
+
+  <Button
+      android:id="@+id/bt_hid_device_unregister_button"
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:text="@string/bt_hid_device_unregister" />
+
+  <include layout="@layout/pass_fail_buttons" />
+</LinearLayout>
+
+</ScrollView>
diff --git a/apps/CtsVerifier/res/layout/bt_hid_host.xml b/apps/CtsVerifier/res/layout/bt_hid_host.xml
new file mode 100644
index 0000000..ec8f627
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/bt_hid_host.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    style="@style/RootLayoutPadding"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:orientation="vertical">
+
+  <Button
+      android:id="@+id/bt_hid_host_pick_device_button"
+      android:layout_width="match_parent"
+      android:layout_height="wrap_content"
+      android:text="@string/bt_hid_host_select_device" />
+
+  <EditText
+      android:id="@+id/bt_hid_host_edit_text"
+      android:layout_width="match_parent"
+      android:layout_height="60dp" />
+
+  <include layout="@layout/pass_fail_buttons" />
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/camera_fov_calibration_photo_capture.xml b/apps/CtsVerifier/res/layout/camera_fov_calibration_photo_capture.xml
index 7f8efa0..c142b15 100644
--- a/apps/CtsVerifier/res/layout/camera_fov_calibration_photo_capture.xml
+++ b/apps/CtsVerifier/res/layout/camera_fov_calibration_photo_capture.xml
@@ -49,7 +49,6 @@
             android:layout_alignParentBottom="true"
             android:layout_alignParentLeft="true"
             android:padding="10sp"
-            android:popupBackground="#ffffff"
             android:textSize="18sp" />
     </RelativeLayout>
 
diff --git a/apps/CtsVerifier/res/layout/camera_video.xml b/apps/CtsVerifier/res/layout/camera_video.xml
index b81721b..5dd28ab 100644
--- a/apps/CtsVerifier/res/layout/camera_video.xml
+++ b/apps/CtsVerifier/res/layout/camera_video.xml
@@ -85,19 +85,39 @@
                 android:id="@+id/resolution_selection"
                 android:layout_width="fill_parent"
                 android:layout_height="wrap_content"/>
-            <Button
-                android:id="@+id/record_button"
-                android:layout_width="wrap_content"
-                android:layout_height="wrap_content"
-                android:text="@string/record_button_text"/>
-            <TextView
-                android:id="@+id/status_label"
-                android:layout_width="wrap_content"
-                android:layout_height="wrap_content"
-                android:text="@string/status_ready"
-                android:padding="2dp"
-                android:textSize="16sp"
-                android:gravity="center" />
+
+            <LinearLayout
+                android:orientation="horizontal"
+                android:layout_width="match_parent"
+                android:layout_height="0dp"
+                android:layout_weight="1" >
+
+                <Button
+                    android:id="@+id/record_button"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/record_button_text"/>
+                <Button
+                    android:id="@+id/next_button"
+                    android:layout_height="wrap_content"
+                    android:layout_width="wrap_content"
+                    android:text="@string/cf_next_button" />
+            </LinearLayout>
+
+            <LinearLayout
+                android:layout_width="match_parent"
+                android:layout_height="0dp"
+                android:layout_weight="2" >
+
+                <TextView
+                    android:id="@+id/status_label"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:text="@string/status_ready"
+                    android:padding="2dp"
+                    android:textSize="16sp"
+                    android:gravity="center" />
+            </LinearLayout>
 
         </LinearLayout>
 
diff --git a/apps/CtsVerifier/res/layout/cf_main.xml b/apps/CtsVerifier/res/layout/cf_main.xml
index 1ff35cc..8a4fb7f 100644
--- a/apps/CtsVerifier/res/layout/cf_main.xml
+++ b/apps/CtsVerifier/res/layout/cf_main.xml
@@ -87,6 +87,11 @@
                 android:id="@+id/format_selection"
                 android:layout_width="fill_parent"
                 android:layout_height="wrap_content"/>
+            <Button
+                android:id="@+id/next_button"
+                android:layout_height="wrap_content"
+                android:layout_width="wrap_content"
+                android:text="@string/cf_next_button" />
 
         </LinearLayout>
 
diff --git a/apps/CtsVerifier/res/layout/device_owner_lock_task_ui.xml b/apps/CtsVerifier/res/layout/device_owner_lock_task_ui.xml
new file mode 100644
index 0000000..00dee1a
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/device_owner_lock_task_ui.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+              style="@style/RootLayoutPadding"
+              android:layout_width="match_parent"
+              android:layout_height="match_parent"
+              android:orientation="vertical">
+
+    <ScrollView
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:fillViewport="true">
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:orientation="vertical">
+
+            <TextView
+                android:id="@+id/device_owner_lock_task_ui_instructions"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/device_owner_lock_task_ui_test_info"
+                android:textSize="18dip" />
+
+            <Button
+                android:id="@+id/start_lock_task_button"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/start_lock_task_button_label" />
+
+            <ListView
+                android:id="@+id/android:list"
+                android:layout_width="match_parent"
+                android:layout_height="0dp"
+                android:layout_weight="1" />
+
+            <include layout="@layout/pass_fail_buttons" />
+        </LinearLayout>
+    </ScrollView>
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/emergency_call_confirm_dialog.xml b/apps/CtsVerifier/res/layout/emergency_call_confirm_dialog.xml
new file mode 100644
index 0000000..fae5167
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/emergency_call_confirm_dialog.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+    <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:orientation="vertical"
+        style="@style/RootLayoutPadding">
+
+        <TextView android:id="@+id/info"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:textSize="18sp"
+            android:padding="5dp"
+            android:text="@string/emergency_call_confirm_info"/>
+        <EditText
+            android:id="@+id/emergency_number"
+            android:layout_height="wrap_content"
+            android:layout_width="match_parent"
+            android:hint="@string/emergency_call_emergency_number_hint_text"
+            android:inputType="phone"
+            android:text="@string/emergency_call_emergency_number_text"/>
+        <Button
+            android:id="@+id/dial_button"
+            android:layout_width="200px"
+            android:layout_height="wrap_content"
+            android:text="@string/emergency_call_dial_text"
+            android:paddingLeft="5dp"
+            android:paddingRight="5dp"
+            android:layout_marginTop="5dp"
+            android:layout_marginRight="5dp"/>
+    </LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/emergency_call_msg_test_confirm_dialog.xml b/apps/CtsVerifier/res/layout/emergency_call_msg_test_confirm_dialog.xml
new file mode 100644
index 0000000..071e2bf
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/emergency_call_msg_test_confirm_dialog.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="utf-8"?>
+    <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:orientation="vertical"
+        style="@style/RootLayoutPadding">
+
+        <TextView android:id="@+id/info"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:layout_weight="1"
+            android:textSize="18sp"
+            android:padding="5dp"
+            android:text="@string/emergency_call_confirm_info"/>
+        <EditText
+            android:id="@+id/emergency_number"
+            android:layout_height="wrap_content"
+            android:layout_width="match_parent"
+            android:hint="@string/emergency_call_emergency_number_hint_text"
+            android:inputType="phone"
+            android:text="@string/emergency_call_emergency_number_text"/>
+        <EditText
+            android:id="@+id/local_phone_number"
+            android:layout_height="wrap_content"
+            android:layout_width="match_parent"
+            android:hint="@string/emergency_call_current_number_hint_text"
+            android:inputType="phone"/>
+        <Button
+            android:id="@+id/dial_button"
+            android:layout_width="200px"
+            android:layout_height="wrap_content"
+            android:text="@string/emergency_call_dial_text"
+            android:paddingLeft="5dp"
+            android:paddingRight="5dp"
+            android:layout_marginTop="5dp"
+            android:layout_marginRight="5dp"/>
+    </LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/keychain_test.xml b/apps/CtsVerifier/res/layout/keychain_test.xml
new file mode 100644
index 0000000..09708c7
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/keychain_test.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:orientation="vertical"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent">
+
+    <ScrollView
+            android:layout_width="match_parent"
+            android:layout_height="320dip"
+            android:layout_weight="2">
+        <TextView
+                android:id="@+id/provisioning_byod_keychain_instructions"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:padding="10dip"
+                android:text="@string/provisioning_byod_keychain_info_start"
+                android:textSize="18dip" />
+    </ScrollView>
+
+    <TextView
+          android:id="@+id/provisioning_byod_keychain_test_log"
+          android:layout_width="match_parent"
+          android:layout_height="wrap_content"
+          android:padding="10dip"
+          android:textSize="18dip" />
+
+    <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:orientation="horizontal">
+
+        <Button
+            android:id="@+id/prepare_test_button"
+            android:layout_width="204dip"
+            android:layout_height="wrap_content"
+            android:text="@string/provisioning_byod_keyguard_disabled_features_prepare_button"/>
+
+        <Button
+            android:id="@+id/run_test_button"
+            android:layout_width="204dip"
+            android:layout_height="wrap_content"
+            android:text="@string/go_button_text"/>
+
+    </LinearLayout>
+
+    <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/location_mode_item.xml b/apps/CtsVerifier/res/layout/location_mode_item.xml
deleted file mode 100644
index 5e8dedb..0000000
--- a/apps/CtsVerifier/res/layout/location_mode_item.xml
+++ /dev/null
@@ -1,54 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
-     Copyright (C) 2013 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
-    android:layout_width="match_parent"
-    android:layout_height="wrap_content" >
-
-    <ImageView
-        android:id="@+id/status"
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:layout_alignParentLeft="true"
-        android:layout_alignParentTop="true"
-        android:layout_marginTop="10dip"
-        android:contentDescription="@string/pass_button_text"
-        android:padding="10dip"
-        android:src="@drawable/fs_indeterminate" />
-
-    <TextView
-        android:id="@+id/instructions"
-        style="@style/InstructionsSmallFont"
-        android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:layout_alignParentRight="true"
-        android:layout_alignParentTop="true"
-        android:layout_toRightOf="@id/status"
-        android:text="@string/location_mode_select_high_accuracy" />
-
-    <Button
-        android:id="@+id/launch_settings"
-        android:layout_width="match_parent"
-        android:layout_height="wrap_content"
-        android:layout_alignParentRight="true"
-        android:layout_below="@id/instructions"
-        android:layout_marginLeft="20dip"
-        android:layout_marginRight="20dip"
-        android:layout_toRightOf="@id/status"
-        android:onClick="launchSettings"
-        android:text="@string/location_mode_start_settings" />
-
-</RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/multinetwork_connectivity.xml b/apps/CtsVerifier/res/layout/multinetwork_connectivity.xml
new file mode 100644
index 0000000..481d04b
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/multinetwork_connectivity.xml
@@ -0,0 +1,82 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+                android:layout_width="match_parent"
+                android:layout_height="match_parent"
+                style="@style/RootLayoutPadding">
+
+    <TextView android:id="@+id/test_prerequisites"
+              android:layout_width="match_parent"
+              android:layout_height="wrap_content"
+              android:text="@string/multinetwork_connectivity_test_pre_requisites"
+              style="@style/InstructionsSmallFont"
+    />
+    <TextView android:id="@+id/test_ap_label"
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:layout_below="@id/test_prerequisites"
+              android:text="@string/multinetwork_connectivity_test_ap_name"
+              style="@style/InstructionsSmallFont"
+    />
+    <EditText android:id="@+id/test_ap_ssid"
+              android:layout_width="match_parent"
+              android:layout_height="wrap_content"
+              android:layout_below="@id/test_ap_label"
+              android:inputType="text"
+              style="@style/InstructionsSmallFont"
+    />
+    <TextView android:id="@+id/test_psk_label"
+              android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:layout_below="@id/test_ap_ssid"
+              android:text="@string/multinetwork_connectivity_test_ap_passphrase"
+              style="@style/InstructionsSmallFont"
+    />
+    <EditText android:id="@+id/test_ap_psk"
+              android:layout_width="match_parent"
+              android:layout_height="wrap_content"
+              android:layout_below="@id/test_psk_label"
+              android:inputType="textPassword"
+              style="@style/InstructionsSmallFont"
+    />
+    <TextView android:id="@+id/current_test"
+              android:layout_width="match_parent"
+              android:layout_height="wrap_content"
+              android:layout_below="@id/test_ap_psk"
+              android:text="@string/multinetwork_connectivity_test_1_desc"
+              style="@style/InstructionsSmallFont"
+    />
+    <TextView android:id="@+id/test_progress_info"
+              android:layout_width="match_parent"
+              android:layout_height="wrap_content"
+              android:layout_below="@id/current_test"
+              style="@style/InstructionsSmallFont"
+    />
+    <Button android:id="@+id/start_multinet_btn"
+            android:text="@string/multinetwork_connectivity_test_start"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_below="@id/test_progress_info"
+            android:layout_centerHorizontal="true"
+    />
+
+
+    <include android:id="@+id/pass_fail_buttons"
+             android:layout_width="match_parent"
+             android:layout_height="wrap_content"
+             android:layout_alignParentBottom="true"
+             layout="@layout/pass_fail_buttons"/>
+</RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/poa_touch.xml b/apps/CtsVerifier/res/layout/poa_touch.xml
index cba7e98..c40766d 100644
--- a/apps/CtsVerifier/res/layout/poa_touch.xml
+++ b/apps/CtsVerifier/res/layout/poa_touch.xml
@@ -20,6 +20,7 @@
         android:layout_width="match_parent"
         android:layout_height="match_parent"
         android:focusable="true"
-        android:focusableInTouchMode="true" />
+        android:focusableInTouchMode="true"
+        android:defaultFocusHighlightEnabled="false" />
 
 </RelativeLayout>
diff --git a/apps/CtsVerifier/res/layout/positive_managed_user.xml b/apps/CtsVerifier/res/layout/positive_managed_user.xml
new file mode 100644
index 0000000..4afdf63
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/positive_managed_user.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    style="@style/RootLayoutPadding"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:orientation="vertical">
+
+    <ScrollView
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:fillViewport="true">
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:orientation="vertical">
+
+            <TextView
+                android:id="@+id/positive_managed_user_instructions"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/managed_user_positive_tests_instructions"
+                android:textSize="18dip" />
+
+            <ListView
+                android:id="@+id/android:list"
+                android:layout_width="match_parent"
+                android:layout_height="0dp"
+                android:layout_weight="1" />
+
+            <include layout="@layout/pass_fail_buttons" />
+        </LinearLayout>
+    </ScrollView>
+</LinearLayout>
diff --git a/apps/CtsVerifier/res/layout/snsr_next_button.xml b/apps/CtsVerifier/res/layout/snsr_next_button.xml
index c4701cc..cd5a970 100644
--- a/apps/CtsVerifier/res/layout/snsr_next_button.xml
+++ b/apps/CtsVerifier/res/layout/snsr_next_button.xml
@@ -21,10 +21,11 @@
     android:paddingTop="@dimen/snsr_view_padding_top">
 
     <Button
-        android:id="@+id/next_button"
+        android:id="@+id/retry_button"
         android:layout_width="match_parent"
         android:layout_height="wrap_content"
-        android:text="@string/next_button_text" />
+        android:text="@string/retry_button_text"
+        android:visibility="gone" />
 
     <Button
         android:id="@+id/pass_button"
@@ -40,4 +41,10 @@
         android:drawableTop="@drawable/fs_error"
         android:visibility="gone" />
 
+    <Button
+        android:id="@+id/next_button"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:text="@string/next_button_text" />
+
 </GridLayout>
diff --git a/apps/CtsVerifier/res/layout/tapjacking.xml b/apps/CtsVerifier/res/layout/tapjacking.xml
index 6e5ca73..998e624 100644
--- a/apps/CtsVerifier/res/layout/tapjacking.xml
+++ b/apps/CtsVerifier/res/layout/tapjacking.xml
@@ -31,8 +31,7 @@
                     android:layout_width="match_parent"
                     android:layout_height="wrap_content"
                     android:layout_alignParentRight="true"
-                    android:layout_alignParentTop="true"
-                    android:text="@string/usb_tapjacking_test_instructions" />
+                    android:layout_alignParentTop="true" />
 
             <Button android:id="@+id/tapjacking_btn"
                     android:text="@string/usb_tapjacking_button_text"
diff --git a/apps/CtsVerifier/res/layout/voicemail_broadcast.xml b/apps/CtsVerifier/res/layout/voicemail_broadcast.xml
index 163da23..0cf1d39 100644
--- a/apps/CtsVerifier/res/layout/voicemail_broadcast.xml
+++ b/apps/CtsVerifier/res/layout/voicemail_broadcast.xml
@@ -58,6 +58,12 @@
       android:layout_height="wrap_content"
       android:text="@string/voicemail_set_default_dialer_button"/>
 
+    <Button
+      android:id="@+id/call_settings_check_not_applicable"
+      android:layout_width="wrap_content"
+      android:layout_height="wrap_content"
+      android:text="@string/visual_voicemail_service_remove_sim_not_applicable"/>
+
     <LinearLayout
       android:layout_width="wrap_content"
       android:layout_height="wrap_content"
diff --git a/apps/CtsVerifier/res/raw/cacert.der b/apps/CtsVerifier/res/raw/cacert.der
index 3934e1b..9acf82a 100644
--- a/apps/CtsVerifier/res/raw/cacert.der
+++ b/apps/CtsVerifier/res/raw/cacert.der
Binary files differ
diff --git a/apps/CtsVerifier/res/raw/usercert.der b/apps/CtsVerifier/res/raw/usercert.der
index cdfb8f7..cb48852 100644
--- a/apps/CtsVerifier/res/raw/usercert.der
+++ b/apps/CtsVerifier/res/raw/usercert.der
Binary files differ
diff --git a/apps/CtsVerifier/res/raw/userkey.der b/apps/CtsVerifier/res/raw/userkey.der
index 31f1f8c..9216bb8 100644
--- a/apps/CtsVerifier/res/raw/userkey.der
+++ b/apps/CtsVerifier/res/raw/userkey.der
Binary files differ
diff --git a/apps/CtsVerifier/res/values-watch/strings.xml b/apps/CtsVerifier/res/values-watch/strings.xml
index 7fb22e9..ca06686 100644
--- a/apps/CtsVerifier/res/values-watch/strings.xml
+++ b/apps/CtsVerifier/res/values-watch/strings.xml
@@ -59,4 +59,5 @@
         6) Unlock the device.\n
         7) Repeat steps (1) through (6) for each screen lock type other than \"None\".
     </string>
+    <string name="usb_tapjacking_usb_debugging_component">com.google.android.apps.wearable.settings/com.google.android.clockwork.settings.SecureAdbActivityAlias</string>
 </resources>
diff --git a/apps/CtsVerifier/res/values/strings.xml b/apps/CtsVerifier/res/values/strings.xml
index 9a97426..4f44c1c 100755
--- a/apps/CtsVerifier/res/values/strings.xml
+++ b/apps/CtsVerifier/res/values/strings.xml
@@ -24,6 +24,10 @@
     <string name="next_button_text">Next</string>
     <string name="go_button_text">Go</string>
 
+    <string name="retry_button_text">Retry</string>
+    <string name="finish_button_text">Finish</string>
+    <string name="fail_and_next_button_text">Fail and Next</string>
+
     <!-- Strings for TestListActivity -->
     <string name="test_category_audio">Audio</string>
     <string name="test_category_camera">Camera</string>
@@ -168,6 +172,7 @@
     <string name="sec_fp_dialog_message">Authenticate now with fingerprint</string>
     <string name="sec_fp_auth_failed">Authentication failed</string>
     <string name="sec_start_test">Start Test</string>
+    <string name="sec_fingerprint_dialog_bound_key_test">Fingerprint Bound Keys Test (System Dialog)</string>
 
     <!-- Strings for BluetoothActivity -->
     <string name="bluetooth_test">Bluetooth Test</string>
@@ -191,6 +196,7 @@
     <string name="bt_control">Bluetooth Control</string>
     <string name="bt_device_communication">Device Communication</string>
     <string name="bt_le">Bluetooth LE</string>
+    <string name="bt_hid">Bluetooth HID</string>
 
     <string name="bt_toggle_bluetooth">Toggle Bluetooth</string>
     <string name="bt_toggle_instructions">Disable and enable Bluetooth to successfully complete this test.</string>
@@ -223,6 +229,44 @@
         devices together and start the test.
     </string>
 
+    <!-- Strings for HidHost and HidDevice activity -->
+    <string name="bt_hid_host_test_name">Bluetooth HID Host Test</string>
+    <string name="bt_hid_host_test_info">
+        Start the CTS Verifier on another device, start the Bluetooth test, and choose
+        \"Bluetooth HID Device\" to complete the test.
+        \n\nClick \"Register app\" and \"Make discoverable\" on another device first, and click
+        \"Select device\". Choose the device from device picker. If the remote device is already
+        paired, unpair it first from system Settings.
+        \n\nAfter the remote device completes the \"Send_report\" command, text \"bluetooth\" should
+        appear in the EditText. Mark the test as passed.
+        \n\n If the device under test (DUT) does not have Bluetooth HID Host service or HID Device
+        service enabled, mark the test as passed.
+    </string>
+    <string name="bt_hid_host">Bluetooth HID Host</string>
+    <string name="bt_hid_host_select_device">Select device</string>
+
+    <string name="bt_hid_device_test_name">Bluetooth HID Device Test</string>
+    <string name="bt_hid_device_test_info">
+        Start the CTS Verifier on another device, start the Bluetooth test, and choose
+        \"Bluetooth HID Host\" to complete the test.
+        \n\nFirst, click the \"Register app\" button, and click the \"Make discoverable\" button.
+        \n\nThen, on another device, click the \"Select device\" button, and choose this device.
+        \n\nWait until the pairing dialog appears for the two devices.
+        \n\nFinally, click the \"Test Send_report\", \"Test Reply_report\", and \"Test Report_error\" buttons.
+        \n\nIf all the commands are successful, click \"Unregister app\" and mark the test as
+        passed.
+        \n\nIf the device under test (DUT) does not have the Bluetooth HID Host service or HID Device
+        service enabled, mark the test as passed.
+    </string>
+
+    <string name="bt_hid_device">Bluetooth HID Device</string>
+    <string name="bt_hid_device_register">Register app</string>
+    <string name="bt_hid_device_unregister">Unregister app</string>
+    <string name="bt_hid_device_send_report">Test Send_report</string>
+    <string name="bt_hid_device_reply_report">Test Reply_report</string>
+    <string name="bt_hid_device_report_error">Test Report_error</string>
+
+
     <string name="bt_secure_server">Secure Server</string>
     <string name="bt_secure_server_instructions">Start the CTS Verifier on another device, start the Bluetooth test, and choose \"Secure Client\" to complete the test.</string>
     <string name="bt_insecure_server">Insecure Server</string>
@@ -271,9 +315,9 @@
     <string name="ble_write_descriptor_name">Bluetooth LE Write Descriptor</string>
     <string name="ble_read_rssi_name">Bluetooth LE Read RSSI</string>
     <string name="ble_client_disconnect_name">Bluetooth LE Client Disconnect</string>
-    <string name="ble_client_test_info">
+    <string name="ble_insecure_client_test_info">
         The Bluetooth LE test must be done simultaneously on two devices. This device is the client.
-        All tests listed here must be done with out pairing.
+        All tests listed here must be done without pairing.
     </string>
     <string name="ble_client_send_connect_info">Type in the Bluetooth address of the remote device to connect to, and verify that the devices are connected.</string>
     <string name="ble_discover_service_info">Verify that the service is discovered when you press the "Discover Service" button.</string>
@@ -561,10 +605,35 @@
         (for example, stationary on a windowsill.  If needed, try again, outside, also with the
         device stationary, and with at least some view of the sky.) and then press Next to run
         the automated tests.</string>
+    <string name="location_emergency_call_test_info">This test verifies whether basic features
+        (wifi, sms, gps) works correctly during the emergency call. Make sure the device is using
+        a special white listed sim card. The wifi and GPS should be on and have internet connection.
+        Press the pass button to skip this test if outside US/Canada.
+    </string>
+    <string name="emergency_call_confirm_info">This test will dial 911! Please make sure to use a
+        whitelisted SIM card to run this test!
+    </string>
+    <string name="emergency_call_skip_info">Current device doesn\'t support cellular network. Skipping the Test.
+    </string>
+    <string name="emergency_call_emergency_number_hint_text">
+        Emergency Number:
+    </string>
+    <string name="emergency_call_current_number_hint_text">
+        Current Number:
+    </string>
+    <string name="emergency_call_dial_text">
+        Dial 911
+    </string>
+    <string name="emergency_call_emergency_number_text">
+        911
+    </string>
     <string name="location_gnss_test_retry_info">If this test fails, please make sure the device
         has line of sight to GNSS satellites (for example, stationary on a windowsill. If needed,
         try again, outside, also with the device stationary, with as much view of the sky as
         possible.) </string>
+    <string name="location_emergency_call_gps_test">Emergency Call GPS Test</string>
+    <string name="location_emergency_call_message_test">Emergency Call Message Test</string>
+    <string name="location_emergency_call_wifi_test">Emergency Call Wifi Test</string>
 
     <!-- Strings for ConnectivityBackgroundTestActivity -->
     <string name="network_background_test">Network Background Connectivity Test</string>
@@ -581,6 +650,50 @@
     </string>
     <string name="network_background_test_start">Start</string>
 
+    <!-- Strings for net.MultiNetworkConnectivityTestActivity -->
+    <string name="multinetwork_connectivity_test">Multinetwork connectivity Test</string>
+    <string name="multinetwork_connectivity_test_instructions">
+        This test verifies that, when a phone has internet connectivity via mobile network
+        is connected to a Wi-Fi access point that doesn\'t have internet or loses internet access,
+        it restores the internet activity over the mobile cell network, while connected to the
+        Wi-Fi access point. \n\n
+
+        1. Setup a wireless access point with ability to turn on and off internet access.
+        2. Have SIM or cellular data connectivity on the phone being tested.
+        3. Execute the tests with the instructions outlined in the test
+        4. When the test completes, it will finish and mark it passed. If it fails, it will mark it \n
+           as failed.
+
+    </string>
+    <string name="multinetwork_connectivity_test_pre_requisites">Prerequisite - Setup a Wi-Fi access point with WPA PSK in which we can turn on or off internet access. </string>
+    <string name="multinetwork_connectivity_test_start">Start</string>
+    <string name="multinetwork_connectivity_test_ap_name">Wi-Fi SSID</string>
+    <string name="multinetwork_connectivity_test_ap_passphrase">WPA 2 passphrase</string>
+    <string name="multinetwork_connectivity_test_continue">Confirm and continue with test.</string>
+    <string name="multinetwork_connectivity_test_rerun">Completed. Re-run.</string>
+    <string name="multinetwork_connectivity_test_running">Running.</string>
+    <string name="multinetwork_connectivity_test_connect_cellular">Connecting to cellular network.</string>
+    <string name="multinetwork_connectivity_test_connect_wifi">Connecting to Wi-Fi network.</string>
+    <string name="multinetwork_connectivity_test_complete">Test completed.</string>
+    <string name="multinetwork_connectivity_test_progress_1">Waiting for Wi-Fi to lose connectivity.</string>
+    <string name="multinetwork_connectivity_test_progress_2">Waiting to check connectivity.</string>
+    <string name="multinetwork_connectivity_test_progress_3">Waiting to make sure Wi-Fi has connectivity.</string>
+    <string name="multinetwork_connectivity_test_1_desc">Test 1 - Connect to Wi-Fi with no internet doesnt disable current connectivity</string>
+    <string name="multinetwork_connectivity_test_2_desc">Test 2 - When connected to Wi-Fi, on losing connectivity, restores mobile connectivity</string>
+    <string name="multinetwork_status_wifi_connect_success">Wi-Fi connect success.</string>
+    <string name="multinetwork_status_mobile_connect_success">Mobile net connect success.</string>
+    <string name="multinetwork_status_wifi_connect_timed_out">Wi-Fi connect timed out.</string>
+    <string name="multinetwork_status_wifi_connect_wrong_ap">Wi-Fi connected to wrong access point.</string>
+    <string name="multinetwork_status_mobile_connect_timed_out">Mobile network connect timed out.</string>
+    <string name="multinetwork_status_mobile_restore_success">Mobile restore succeeded.</string>
+    <string name="multinetwork_status_mobile_restore_failed">Mobile restore failed.</string>
+    <string name="multinetwork_connectivity_test_1_prereq">Make sure that the Hotspot does not have internet access.</string>
+    <string name="multinetwork_connectivity_test_2_prereq_1">Make sure that the Hotspot has internet access.</string>
+    <string name="multinetwork_connectivity_test_2_prereq_2">Make sure that the Hotspot does not have internet access. When prompted to go back to using mobile data, choose ok.</string>
+    <string name="multinetwork_connectivity_test_all_prereq_1">Looks like your device does not support telephony or mobile data. If yes, you can mark test passed and proceed.</string>
+    <string name="multinetwork_connectivity_test_all_prereq_2">Need mobile data to proceed. Please insert a mobile data capable sim and repeat the test. By marking test as passed, you acknowledge that the device cannot do mobile data.</string>
+    <string name="multinetwork_status_wifi_connectivity_failed">Wi-Fi connectivity failed.</string>
+
     <!-- Strings for NfcTestActivity -->
     <string name="nfc_test">NFC Test</string>
     <string name="nfc_test_info">The Peer-to-Peer Data Exchange tests require two devices with
@@ -799,6 +912,8 @@
     <string name="snsr_keep_device_rotating_clockwise">Once the test begins, you will have to keep rotating the device clockwise.</string>
     <string name="snsr_wait_for_user">Press \'Next\' to continue.</string>
     <string name="snsr_wait_to_begin">Press \'Next\' to begin.</string>
+    <string name="snsr_wait_to_retry">Press \'Retry\' to rerun or \'Fail and Next\' to skip to next.</string>
+    <string name="snsr_wait_to_finish">Press \'Retry\' to rerun or \'Finish\' to end.</string>
     <string name="snsr_on_complete_return">After completing the task, go back to this test.</string>
     <string name="snsr_movement_expected">Movement was expected during the test. Found=%1$b.</string>
     <string name="snsr_sensor_feature_deactivation">IMPORTANT NOTE: Please also turn off any special features in the
@@ -902,6 +1017,9 @@
     firstEventReceivedMs=%2$d diffMs=%3$d toleranceMs=%4$d </string>
     <string name="snsr_device_suspend_test_instr">One you begin the test, disconnect USB, turn off the display and allow
     the device to go into suspend mode. The screen will turn on and a sound will be played once all the tests are completed.</string>
+    <string name="snsr_device_suspend_service">Device Suspend Service</string>
+    <string name="snsr_device_suspend_service_active">Device Suspend Test Active</string>
+    <string name="snsr_device_suspend_service_notification">Device Suspend Test is using sensors.</string>
 
     <!-- Significant Motion -->
     <string name="snsr_significant_motion_test">Significant Motion Tests</string>
@@ -921,6 +1039,17 @@
     You will need to walk to ensure that Significant Motion triggers. The screen will turn on and a sound will be played once the test completes.</string>
     <string name="snsr_device_did_not_wake_up_at_trigger">Device did not wakeup at trigger time. wakeTime=%1$d ms triggerTime=%2$d ms</string>
 
+    <!-- Event sanitization for idle UIDs -->
+    <string name="snsr_event_sanitization_test">Event sanitization for idle UID test</string>
+    <string name="snsr_event_sanitization_test_setup">Run the \'adb shell cmd sensorservice set-uid-state com.android.cts.verifier idle\' shell command
+        to emulate the CtsVerifier UID being idle.</string>
+    <string name="snsr_event_sanitization_test_cleanup">Run the \'adb shell cmd sensorservice reset-uid-state com.android.cts.verifier\' shell command
+        to stop emulating the CtsVerifier UID being idle. Failing to do that would lead to other tests failing!</string>
+    <string name="snsr_significant_motion_test_uid_idle">Move around with the device to try triggering significant motion</string>
+    <string name="snsr_significant_motion_test_uid_idle_expectation">No trigger events should be generated while idle</string>
+    <string name="snsr_proximity_test_uid_idle">Touch the proximity sensor to try triggering it</string>
+    <string name="snsr_proximity_test_uid_idle_expectation">No on-change events should be generated while idle</string>
+
     <!-- Low Latency Off-Body Detect -->
     <string name="snsr_offbody_sensor_test">Off Body Sensor Tests</string>
     <string name="snsr_offbody_sensor_registration">Registration failed for low latency offbody detect sensor.\n</string>
@@ -960,12 +1089,13 @@
     <string name="usb_tapjacking_test_instructions">
         1. Connect device via usb to computer.\n
         2. Click \"Show overlay\" button.  Settings may appear if the CTS Verifier app doesn\'t have display over apps permission.  Enable this permission and then click back to navigate back to the app.\n
-        3. Trigger USB debugging dialog (from computer terminal): \"adb shell am start -e fingerprints placeholder -e key placeholder com.android.systemui/.usb.UsbDebuggingActivity\"\n
-        4. USB debugging dialog should appear with the overlay on top saying \"This message covers the USB debugging RSA prompt\" to appear.\n
+        3. Trigger USB debugging dialog (from computer terminal): \"adb shell am start -e fingerprints placeholder -e key placeholder %s\"\n
+        4. USB debugging dialog should appear.  If the overlay cannot be seen above the USB debugging dialog, PASS this test (no need to proceed to Step 5).  Else, if the overlay does appear on top saying \"This message covers the USB debugging RSA prompt\", continue to Step 5.\n
         5. Try clicking OK. \n
         Test pass if you cannot click OK when the text quoted above is on top of the USB debugging dialog.  Toast should appear saying there is an overlay so Settings cannot verify your response. \n
         Note: Fake message overlay may remain on screen until you leave the test. This is working as intended. \n
     </string>
+    <string name="usb_tapjacking_usb_debugging_component">com.android.systemui/.UsbDebuggingActivityAlias</string>
     <string name="usb_tapjacking_overlay_message">This message covers the USB debugging RSA prompt</string>
     <string name="usb_tapjacking_error_toast">Please restart the application and try again.</string>
     <string name="usb_tapjacking_error_toast2">Please enable display over apps permission for this application before proceeding.</string>
@@ -1012,6 +1142,7 @@
     <string name="ci_intents_label">Intents Test</string>
     <string name="ci_intents_direction_label">clockwise</string>
     <string name="ci_instruction_heading_label">Instructions:</string>
+    <string name="ci_directory_creation_error">CTS Verifier debug directory could not be created, please try again</string>
     <string name="ci_instruction_text_photo_label">READ BEFORE STARTING TEST</string>
     <string name="ci_instruction_text_passfail_label">Choose \"Pass\" if the right intent is fired after taking a photo from the camera app. Otherwise, choose \"Fail\".</string>
     <string name="ci_instruction_text_app_picture_label">\n
@@ -1061,10 +1192,12 @@
     the right view must be horizontally mirrored relative to the left
     view.\n - Note that the frame rate of the right view may be much
     lower than on the left; this is not an indication of a failed
-    test.
+    test.\n - The next button shows the next resolution and format
+    combination of the current camera; this is optional.
     </string>
     <string name="cf_preview_label">Normal preview</string>
     <string name="cf_format_label">Processed callback data</string>
+    <string name="cf_next_button">Next</string>
 
     <!-- Strings for Camera Video -->
     <string name="record_button_text">Test</string>
@@ -1075,10 +1208,11 @@
     seconds of video recording. Playback will show up in the right view
     window after recording is complete. \n - Use the spinners to choose
     camera and resolution combinations. The playback should be similar
-    to what you saw in preview. \n - After all possible combinations
-    are tested, the pass button will be enabled. You may press the pass
-    button to indicate a pass. \n - You may press fail button any time during
-    the test to indicate failure.
+    to what you saw in preview. \n - The next button starts the test for the next
+    untested resolution of the currently selected camera. \n - After all possible
+    combinations are tested, the pass button will be enabled. You may press the
+    pass button to indicate a pass. \n - You may press the fail button at any time
+    during the test to indicate failure. \n
     </string>
     <string name="video_capture_label">Video capture</string>
     <string name="video_playback_label">Video playback</string>
@@ -1175,9 +1309,8 @@
     <string name="no_camera_manager">
         No camera manager exists!  This test device is in a bad state.
     </string>
-    <string name="all_legacy_devices">
-        All cameras on this device are LEGACY mode only - ITS tests are only required on LIMITED
-        or better devices.  Pass.
+    <string name="all_exempted_devices">
+        All cameras on this device are exempted from ITS - Pass.
     </string>
     <string name="its_test_passed">All Camera ITS tests passed.  Pass button enabled!</string>
     <string name="its_test_failed">Some Camera ITS tests failed.</string>
@@ -1368,9 +1501,9 @@
         start in a role, and the other should start in the complementary
         role. Your device must pass the tests in all roles.
     </string>
-    <string name="aware_not_enabled">Wi-Fi is not enabled</string>
-    <string name="aware_not_enabled_message">These tests require Wi-Fi to be enabled.
-        Click the button below to go to system settings and enable Wi-Fi.</string>
+    <string name="aware_not_enabled">Wi-Fi / Location Mode is not enabled</string>
+    <string name="aware_not_enabled_message">These tests require Wi-Fi and Location Mode to be enabled.
+        Click the button below to go to system settings and enable Wi-Fi and Location Mode.</string>
     <string name="aware_settings">Wi-Fi Settings</string>
     <string name="aware_setup_error">
         Test failed.\n\nSet up error. Check whether Wi-Fi is enabled.</string>
@@ -1381,6 +1514,7 @@
     <string name="aware_dp_ib_passphrase_unsolicited">Data Path: Passphrase: Unsolicited/Passive</string>
     <string name="aware_dp_ib_open_solicited">Data Path: Open: Solicited/Active</string>
     <string name="aware_dp_ib_passphrase_solicited">Data Path: Passphrase: Solicited/Active</string>
+    <string name="aware_discovery_ranging">Discovery with Ranging</string>
     <string name="aware_publish">Publish</string>
     <string name="aware_subscribe">Subscribe</string>
 
@@ -1403,6 +1537,7 @@
     <string name="aware_status_publish_timeout">Publish failure - timed out!</string>
     <string name="aware_status_publish_null_session">Publish failure - null session!</string>
     <string name="aware_status_discovery">Service discovered ...</string>
+    <string name="aware_status_discovery_with_info">Service discovered ... peer MAC : %1$s</string>
     <string name="aware_status_discovery_timeout">Service discovery failure - timed out!</string>
     <string name="aware_status_discovery_fail">Service discovery failure - parameter mismatch!</string>
     <string name="aware_status_send_success">Sent message successfully ...</string>
@@ -1410,6 +1545,9 @@
     <string name="aware_status_send_timeout">Send message failure - timed out!</string>
     <string name="aware_status_send_fail_parameter">Send message failure - mismatched ids!</string>
     <string name="aware_status_received">Received message ...</string>
+    <string name="aware_status_starting_rtt">Starting RTT operations ...</string>
+    <string name="aware_status_waiting_for_peer_rtt">Pausing to let other device perform RTT ...</string>
+    <string name="aware_status_received_peer_rtt_done">Other device done with RTT ...</string>
     <string name="aware_status_received_mac">Received peer MAC address: %1$s ...</string>
     <string name="aware_status_receive_timeout">Receive message failure - timed out!</string>
     <string name="aware_status_receive_failure">Receive message failure - didn\'t receive expected message!</string>
@@ -1417,6 +1555,11 @@
     <string name="aware_status_network_success">Network formed ...</string>
     <string name="aware_status_network_failed">Network request failure - timed out!</string>
     <string name="aware_status_sleeping_wait_for_responder">Pausing to let Responder time to set up ...</string>
+    <string name="aware_status_ranging_peer_failure">Ranging to PeerHandle failure: %1$d failures of %2$d attempts!</string>
+    <string name="aware_status_ranging_mac_failure">Ranging to MAC address failure: %1$d failures of %2$d attempts!</string>
+    <string name="aware_status_ranging_peer_success">Ranging to PeerHandle success: %1$d successes of %2$d attempts!</string>
+    <string name="aware_status_ranging_mac_success">Ranging to MAC address success: %1$d successes of %2$d attempts!</string>
+    <string name="aware_status_lifecycle_failed">Discovery lifecycle FAILURE!</string>
     <string name="aware_status_lifecycle_ok">Discovery lifecycle validated!</string>
 
     <string name="aware_data_path_open_unsolicited_publish">Data Path: Open: Unsolicited Publish</string>
@@ -1443,6 +1586,10 @@
     <string name="aware_data_path_oob_passphrase_responder_info">The responder is now ready.\n\nOn the other device: start the \'Data Path (OOB): Passphrase\' / \'Initiator\' test.</string>
     <string name="aware_data_path_oob_passphrase_initiator">Data Path (OOB): Passphrase: Initiator</string>
 
+    <string name="aware_discovery_ranging_publish">Discovery with Ranging: Publish</string>
+    <string name="aware_discovery_ranging_publish_info">The publisher is now ready.\n\nOn the other device: start the \'Discovery with Ranging\' / \'Subscribe\' test.</string>
+    <string name="aware_discovery_ranging_subscribe">Discovery with Ranging: Subscribe</string>
+
     <string name="camera_fov_calibration">Camera FOV Calibration</string>
     <string name="camera_fov_calibration_done">Done</string>
     <string name="camera_fov_general_settings">General settings</string>
@@ -1515,17 +1662,15 @@
     <string name="test_mute_dnd_affected_streams">Test mute streams</string>
     <string name="test_ringer_manager">Test RingtoneManager</string>
     <string name="enable_sound_effects">Please enable sound effects in Sound settings.</string>
-
     <string name="attention_ready">I\'m done</string>
     <string name="attention_filter_any">Please enable \"Do not disturb\" by tapping the Quick Settings tile.</string>
     <string name="attention_filter_all">Please disable \"Do not disturb\" by tapping the Quick Settings tile.</string>
-    <string name="attention_filter_priority">Please select \"Priority only\" in the dialog that appears
-        when you tap the \"Do not disturb\" tile in Quick Settings, and customize the setting to allow messages from
-        starred contacts only by tapping "More settings".</string>
-    <string name="attention_filter_alarms">Please select \"Alarms only\" in the dialog that appears
-        when you tap the \"Do not disturb\" tile in Quick Settings.</string>
-    <string name="attention_filter_none">Please select \"Total silence\" in the dialog that appears
-        when you tap the \"Do not disturb\" tile in Quick Settings.</string>
+    <string name="attention_filter_priority">Please enable \"Do not disturb\" by tapping the Quick
+        Settings tile.  Then, long press the same tile and customize the setting to allow messages
+        from starred contacts only.</string>
+    <string name="attention_filter_priority_mimic_alarms_only">Please enable Priority-Only \"Do not disturb\"
+        by tapping the Quick Settings tile.  Then, long press the same tile and customize the setting to allow sounds
+        from Alarms and Media (if applicable) only.</string>
     <string name="attention_create_contacts">Create contacts for notification annotations.</string>
     <string name="attention_delete_contacts">Delete test contacts.</string>
     <string name="attention_default_order">Check that ranker defaults to time order.</string>
@@ -1536,9 +1681,13 @@
     <string name="attention_phone_order">Check that ranker respects telephone URIs for contacts.</string>
     <string name="attention_interruption_order">Check that ranker temporarily boosts interruptions.
     This test takes 30 seconds to complete.</string>
-    <string name="attention_none_are_filtered">Check that \"All\" mode doesn\'t filter any notifications.</string>
-    <string name="attention_some_are_filtered">Check that \"Priority\" mode doesn\'t filter priority notifications.</string>
+    <string name="attention_none_are_filtered_messages">Check that \"All\" mode doesn\'t filter any notifications (messages).</string>
+    <string name="attention_none_are_filtered_diff_categories">Check that \"All\" mode doesn\'t filter any notifications (event, reminder, alarm).</string>
+    <string name="attention_some_are_filtered_messages">Check that \"Priority\" mode doesn\'t filter priority notifications (messages from starred contacts).</string>
+    <string name="attention_some_are_filtered_alarms">Check that \"Priority\" mode doesn\'t filter priority notifications (alarms).</string>
+    <string name="attention_some_are_filtered_media_system_other">Check that \"Priority\" mode doesn\'t filter priority notifications (media, system, other).</string>
     <string name="attention_all_are_filtered">Check that \"None\" mode filters all notifications.</string>
+    <string name="attention_cannot_disallow_alarms_or_media">Check that apps targeted with Pre-P SDK can\'t disallow alarms or media from bypassing DND.</string>
     <string name="nls_test">Notification Listener Test</string>
     <string name="nas_test">Notification Assistant Test</string>
     <string name="nls_service_name">Notification Listener for CTS Verifier</string>
@@ -1575,6 +1724,10 @@
         under Apps > Gear Icon > Default > Notification Assistant and return here.</string>
     <string name="nls_enable_service">Please enable \"Notification Listener for CTS Verifier\"
         under Apps > Gear Icon > Special Access > Notification Access and return here.</string>
+    <string name="nls_block_app">Please block the linked application and return here.</string>
+    <string name="nls_unblock_app">Please unblock the linked application and return here.</string>
+    <string name="nls_block_channel">Please block the linked notification channel and return here.</string>
+    <string name="nls_block_group">Please block the linked notification channel group and return here.</string>
     <string name="nls_cannot_enable_service">Please make sure you cannot enable
         \"Notification Listener for CTS Verifier\" and return here.</string>
     <string name="nls_disable_service">Please disable \"Notification Listener for CTS Verifier\"
@@ -1593,6 +1746,7 @@
     <string name="nas_snooze_context">Check that the Assistant can snooze a notification until a given context.</string>
     <string name="nls_clear_one">Check that service can clear a notification.</string>
     <string name="nls_clear_one_reason">Check that service can clear a notification and receive the correct reason for dismissal.</string>
+    <string name="nls_clear_one_stats">Check that service does not receive notification stats.</string>
     <string name="nls_clear_all">Check that service can clear all notifications.</string>
     <string name="nls_service_stopped">Service should stop once disabled.</string>
     <string name="nls_note_missed">Check that notification was not received.</string>
@@ -1624,57 +1778,6 @@
     <string name="cp_get_rules">Retrieving Automatic Zen Rules</string>
     <string name="cp_get_rule">Retrieving Automatic Zen Rule</string>
 
-    <string name="location_mode_high_accuracy_test">High Accuracy Mode Test</string>
-    <string name="location_mode_high_accuracy_info">
-        This test checks that the Location Mode API is consistent with the
-        Location Provider API when the device is in High Accuracy location mode.
-    </string>
-    <string name="location_mode_select_high_accuracy">
-        Please select the \"High accuracy\" mode at Settings > Location
-        (hint: tap the "Mode" item) and return here.
-    </string>
-    <string name="location_mode_battery_saving_test">Battery Saving Mode Test</string>
-    <string name="location_mode_battery_saving_info">
-        This test checks that the Location Mode API is consistent with the
-        Location Provider API when the device is in Battery Saving location mode.
-    </string>
-    <string name="location_mode_select_battery_saving">
-        Please select the \"Battery Saving\" mode at Settings > Location
-        (hint: tap the "Mode" item) and return here.
-    </string>
-    <string name="location_mode_device_only_test">Device Only Mode Test</string>
-    <string name="location_mode_device_only_info">
-        This test checks that the Location Mode API is consistent with the
-        Location Provider API when the device is in Device Only location mode.
-    </string>
-    <string name="location_mode_select_device_only">
-        Please select the \"Device Only\" mode at
-        Settings > Location (hint: tap the "Mode" item) and return here.
-    </string>
-    <string name="location_mode_off_test">Location Mode Off Test</string>
-    <string name="location_mode_off_info">
-        This test checks that the Location Mode API is consistent with the
-        Location Provider API when the device is in the Off location mode.
-    </string>
-
-    <string name="location_mode_start_settings">Launch Settings</string>
-    <string name="location_mode_turn_on">
-        Please turn ON location access (the switch at the top of Settings > Location)
-        and return here.
-    </string>
-    <string name="location_mode_turn_off">
-        Please turn OFF location access (the switch at the top of Settings > Location)
-        and return here.
-    </string>
-    <string name="location_mode_secure_gps_on">GPS provider should be ON in secure settings.</string>
-    <string name="location_mode_secure_gps_off">GPS provider should be OFF in secure settings.</string>
-    <string name="location_mode_secure_nlp_on">Network location provider should be ON in secure settings.</string>
-    <string name="location_mode_secure_nlp_off">Network location provider should be OFF in secure settings.</string>
-    <string name="location_mode_manager_gps_on">GPS provider should be ON in LocationManager.</string>
-    <string name="location_mode_manager_gps_off">GPS provider should be OFF in LocationManager.</string>
-    <string name="location_mode_manager_nlp_on">Network location provider should be ON in LocationManager.</string>
-    <string name="location_mode_manager_nlp_off">Network location provider should be OFF in LocationManager.</string>
-
     <string name="cacert_test">CA Cert Notification Test</string>
     <string name="cacert_info">This test checks that when a CA Certificate is installed, the user is notified.</string>
     <string name="cacert_do_something">Do it</string>
@@ -1878,6 +1981,7 @@
         4. Verify that the background color of the remaining image is blue.\n
         5. Verify that the header text says \"CtsVerifier\".\n
         6. Confirm your credentials and verify that the credentials you entered previously work.
+        7. Verify that the work app is launched.
     </string>
     <string name="provisioning_byod_confirm_work_credentials_header">
         CtsVerifier
@@ -1903,7 +2007,7 @@
     <string name="provisioning_byod_recents_verify_redacted_instruction">
         1) Follow the instructions on-screen to set a work password.\n
         2) Turn the screen off and on again, or use the "lock now" button, to lock the work profile.\n
-        3) Open Recents.\n
+        3) Go to the home screen and then open Recents.\n
         4) Confirm that this "CTS Verifier" activity is shown in Recents.\n
         5) Confirm that the contents of the activity <b>are</b> hidden.\n
         6) Return to this page and pass the test.
@@ -1922,6 +2026,35 @@
         The work profile still has a separate password. Please remove this before continuing.
     </string>
 
+    <string name="provisioning_byod_keychain">KeyChain test</string>
+    <string name="provisioning_byod_keychain_info_start">
+        In this test, you\'ll verify that keys generated by KeyChain are as usable as keys
+        installed into KeyChain and that they can be hidden from users.\n
+        The test has two parts:\n
+        1) Testing that a generated key can be selected by the user.\n
+        2) Testing that a generated key can be hidden from users.\n
+        \n
+        Tap \"Prepare Test\" button below to begin.\n
+        \n
+        NOTE: A screen lock must be configured for this test. Otherwise, test preparation
+        will fail to generate a key for use by the test.
+    </string>
+    <string name="provisioning_byod_keychain_info_first_test">
+        Once you press \'Go\', a prompt titled \"Choose certificate\" should appear.\n
+        Verify that the list in this dialog has one item, starting with \'cts-verifier-gen\'.
+        Press \'Select\' to select it.\n
+        If the test passes, you\'ll see the text \"Second test ready\" at the bottom.\n
+        \n
+        Press \'Go\'.\n
+    </string>
+    <string name="provisioning_byod_keychain_info_second_test">
+        Once you press \'Run 2nd test\', the same prompt should appear again.\n
+        This time, verify that the title is \"No certificates found\" and the list is empty,
+        then press \'Cancel\'.\n
+        \n
+        Mark the test as passed if the text at the bottom shows \"PASSED (2/2)\"\n
+    </string>
+
     <!-- Strings for DeskClock -->
     <string name="deskclock_tests">Alarms and Timers Tests</string>
     <string name="deskclock_tests_info">
@@ -2305,7 +2438,13 @@
         - Both Personal and Work categories exist.\n
         - \"Remove work profile\" or \"Uninstall\" exists under the Work category.\n
         \n
-        Furthermore, verify that:\n
+        Use the Back button (or navigate back to this app using Recents) to return to this page.
+    </string>
+    <string name="provisioning_byod_user_settings">Profile-aware user settings</string>
+    <string name="provisioning_byod_user_settings_instruction">
+        Please press the Go button to open the Settings page.
+        (If this device has a separate app for work settings, ignore the Go button and open that app manually from the launcher.)\n
+        Navigate to Accounts and confirm that:\n
         - There are two auto-sync options present, one for personal and one for work data (either on the screen or in the overflow menu).\n
         - De-selecting either option prompts a warning dialog.\n
         \n
@@ -2410,9 +2549,9 @@
     <string name="provisioning_byod_nfc_beam_allowed_instruction">
         Please press the Go button to test if Nfc beam can be triggered in the work profile.\n
         \n
-        For the first test, press \"Send manual beam\" to trigger a beam, then bump into another device to send the file. Verify that the file is successfully received.\n
+        For the first test, press \"Send manual beam\" to trigger a beam, then bump into another device to send the tag. Verify that the tag is successfully received.\n
         \n
-        For the second test, press \"Send share intent\" to trigger a beam, then bump into another device to send the file. Verify that the file is successfully received.\n
+        For the second test, press \"Send share intent\" to trigger a beam, then bump into another device to send the tag. Verify that the tag is successfully received.\n
         \n
         Then use the Back button to return to this test and mark accordingly.
     </string>
@@ -2473,57 +2612,52 @@
         3. Go back to the cts-verifier tests using the back button, then mark the test accordingly.\n
     </string>
 
-    <string name="provisioning_byod_turn_off_work">Turn off work mode</string>
-    <string name="provisioning_byod_turn_off_work_info">This test verifies device behaviors when turning off work mode.</string>
+    <string name="provisioning_byod_turn_off_work">Turn off work profile</string>
+    <string name="provisioning_byod_turn_off_work_info">This test verifies device behaviors when turning off work profile.</string>
     <string name="provisioning_byod_turn_off_work_instructions">
         This test verifies the device behavior when work profile is turned off.\n
         Please exercise the following tests in sequence.\n
-        The button below can be used to open the Settings page where you can toggle work mode.\n
+        The button below can be used to open the Settings page where you can toggle the work profile.\n
         (If this device has a separate app for work settings, ignore the button and open that app manually from the launcher).\n
     </string>
-    <string name="provisioning_byod_turn_off_work_prepare_button">Open Settings to toggle work mode</string>
+    <string name="provisioning_byod_turn_off_work_prepare_button">Open Settings to toggle work profile</string>
 
     <string name="provisioning_byod_turn_off_work_prepare_notifications">Prepare a work notification</string>
     <string name="provisioning_byod_turn_off_work_prepare_notifications_instruction">
         This is a test setup step.\n
         1. Press the go button to send a work notification.\n
         2. Verify that the notification is displayed and mark this test as passed.\n
-        (Note: in the following test, you will be asked to verify the notification disappears after work mode is turned off.)
+        (Note: in the following test, you will be asked to verify the notification disappears after the work profile is turned off.)
     </string>
 
-    <string name="provisioning_byod_turn_off_work_turned_off">Please turn off work mode</string>
-    <string name="provisioning_byod_turn_off_work_turned_off_toast">Open settings to turn off work mode, using the button above.</string>
+    <string name="provisioning_byod_turn_off_work_turned_off">Please turn off work profile</string>
+    <string name="provisioning_byod_turn_off_work_turned_off_toast">Open settings to turn off work profile, using the button above.</string>
 
-    <string name="provisioning_byod_turn_off_work_notifications">Notifications when work mode is off</string>
+    <string name="provisioning_byod_turn_off_work_notifications">Notifications when work profile is off</string>
     <string name="provisioning_byod_turn_off_work_notifications_instruction">
         Verify that the previously-shown work notification has now disappeared.
     </string>
 
-    <string name="provisioning_byod_turn_off_work_icon">Status bar icon when work mode is off</string>
-    <string name="provisioning_byod_turn_off_work_icon_instruction">
-        Now that work mode is off, please verify that the status bar shows an icon indicating that work mode is off.\n
-    </string>
-
-    <string name="provisioning_byod_turn_off_work_launcher">Starting work apps when work mode is off</string>
+    <string name="provisioning_byod_turn_off_work_launcher">Starting work apps when work profile is off</string>
     <string name="provisioning_byod_turn_off_work_launcher_instruction">
-        This test verifies that work applications cannot be started if work mode is off.\n
+        This test verifies that work applications cannot be started if the work profile is off.\n
         1. Press home to go to the launcher.\n
         2. Verify that work applications are greyed out.\n
         3. Tap on a work application.\n
         4. Verify that the application does not start.\n
     </string>
 
-    <string name="provisioning_byod_turn_off_work_turned_on">Please turn work mode back on</string>
-    <string name="provisioning_byod_turn_off_work_turned_on_toast">Open settings to turn work mode back on, either manually or using the button above.</string>
+    <string name="provisioning_byod_turn_off_work_turned_on">Please turn work profile back on</string>
+    <string name="provisioning_byod_turn_off_work_turned_on_toast">Open settings to turn work profile back on, either manually or using the button above.</string>
 
-    <string name="provisioning_byod_turn_on_work_icon">Status bar icon when work mode is on</string>
+    <string name="provisioning_byod_turn_on_work_icon">Status bar icon when work profile is on</string>
     <string name="provisioning_byod_turn_on_work_icon_instruction">
-        Now that work mode is back on, please verify that the status bar icon for work mode off is no longer visible.
+        Now that the work profile is back on, please verify that the status bar icon shown while the work profile was off is no longer visible.
     </string>
 
-    <string name="provisioning_byod_turn_on_work_launcher">Starting work apps when work mode is on</string>
+    <string name="provisioning_byod_turn_on_work_launcher">Starting work apps when work profile is on</string>
     <string name="provisioning_byod_turn_on_work_launcher_instruction">
-        Now that work mode is back on, please go to the launcher and verify that you can start a work application.
+        Now that the work profile is back on, please go to the launcher and verify that you can start a work application.
     </string>
 
     <string name="provisioning_byod_organization_info">Organization Info</string>
@@ -2551,12 +2685,21 @@
         1. Press the Go button to set a new password for the personal side.\n
         2. Lock and unlock the screen to verify that the personal side password was set correctly.\n
     </string>
+    <string name="provisioning_byod_work_profile_widget">Work profile widget</string>
+    <string name="provisioning_byod_work_profile_widget_info">Verify that work profile widget can be added into launcher</string>
+    <string name="provisioning_byod_work_profile_widget_description">
+        This test verifies that the widget in work profile can be added into Launcher.\n
+
+        1. Go to home screen.\n
+        2. Add the widget titled \"CTS Verifier\" and badged with work profile briefcase to the home screen.\n
+        3. If you can add the widget to the home screen, please select \"pass\". Otherwise, select \"fail\".
+    </string>
 
     <!-- Strings for DeviceOwnerNegativeTestActivity -->
     <string name="negative_device_owner">No Device Owner Tests</string>
     <string name="device_owner_negative_category">No Device Owner Tests</string>
     <string name="device_owner_provisioning_negative">Device owner provisioning</string>
-    <string name="device_owner_provisioning_negative_info">The device owner provisioning test verifies that setting up a corporate owned device can only be done on a factory reset device.\n\nPlease click the "Start provisioning" button, and when you see a warning dialog telling the device can\'t be set up, select "pass". Otherwise, select "fail".</string>
+    <string name="device_owner_provisioning_negative_info">The device owner provisioning test verifies that setting up a corporate owned device can only be done on a factory reset device.\n\nPlease click the "Start provisioning" button, and when you see a warning dialog telling the device is already set up, select "pass". Otherwise, select "fail".</string>
     <string name="start_device_owner_provisioning_button">Start provisioning</string>
     <string name="enterprise_privacy_quick_settings_negative">Quick settings disclosure</string>
     <string name="enterprise_privacy_quick_settings_negative_info">
@@ -2609,8 +2752,9 @@
     <string name="set_device_owner_button_label">Set up device owner</string>
     <string name="set_device_owner_dialog_title">Set up device owner</string>
     <string name="set_device_owner_dialog_text">
+            This test requires CtsEmptyDeviceOwner.apk to be installed on the device.
             Please set the device owner by enabling USB debugging on the device and issuing the following command on the host:\n
-            adb shell dpm set-device-owner \'com.android.cts.verifier/com.android.cts.verifier.managedprovisioning.DeviceAdminTestReceiver\'
+            adb shell dpm set-device-owner com.android.cts.emptydeviceowner/.EmptyDeviceAdmin
     </string>
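+    <!-- Added for illustration, not part of the original resources: since CtsEmptyDeviceOwner.apk
+         is installed only for this test, it can typically be cleaned up afterwards with a command
+         along the lines of (assumes the admin is declared test-only):
+         adb shell dpm remove-active-admin com.android.cts.emptydeviceowner/.EmptyDeviceAdmin -->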
     <string name="device_owner_remove_device_owner_test">Remove device owner</string>
     <string name="device_owner_remove_device_owner_test_info">
@@ -2685,6 +2829,150 @@
     </string>
     <string name="device_owner_disable_keyguard_button">Disable keyguard</string>
     <string name="device_owner_reenable_keyguard_button">Reenable keyguard</string>
+    <string name="device_owner_lock_task_ui_test">LockTask UI</string>
+    <string name="device_owner_lock_task_ui_test_info">
+            The following tests verify the configurable UI during LockTask, a special mode that
+            prevents the user from leaving the current application.\n\n
+            Please make sure the lock screen is turned on before the test. Press the button below to
+            start LockTask mode. Then mark each item as \'pass\' or \'fail\' according to the
+            instructions.\n\n
+            Finally, execute the last test item to leave LockTask mode.
+    </string>
+    <string name="start_lock_task_button_label">Start LockTask mode</string>
+    <string name="device_owner_lock_task_ui_default_test">Default LockTask UI</string>
+    <string name="device_owner_lock_task_ui_default_test_info">
+            Press the button below to reset to default LockTask UI.
+            Observe the following UI restrictions. Mark the test as \'pass\' only if ALL of the
+            requirements below are met.\n\n
+            1) Nothing is shown in the status bar, including notification icons, connectivity icons,
+            battery status, clock, etc.\n
+            2) The status bar can\'t be expanded. That is, the \'swipe-down\' gesture doesn\'t work
+            for the status bar.\n
+            3) The software Home button is hidden.\n
+            4) The software Overview button is hidden and the Overview gesture (swipe-up) does not
+            work.\n
+            5) Long-press the power button. The power button menu, which usually shows the power-off
+            button, etc., isn\'t shown.\n
+            6) Press the power button to turn off the screen, and press it again to turn the screen
+            back on. Lock screen shouldn\'t be shown.\n
+            7) The assist gesture isn\'t available.
+    </string>
+    <string name="device_owner_lock_task_ui_system_info_test">Enable system info</string>
+    <string name="device_owner_lock_task_ui_system_info_test_info">
+            Press the button below to enable system info. Observe the system info area of the status
+            bar is now enabled. This includes the clock, connectivity info, battery info, etc.\n\n
+            The rest of the UI restrictions should still apply:\n
+            1) Notification icons are still hidden on the status bar.\n
+            2) The status bar can\'t be expanded. That is, the \'swipe-down\' gesture doesn\'t work
+            for the status bar.\n
+            3) The software Home button is hidden.\n
+            4) The software Overview button is hidden and the Overview gesture (swipe-up) does not
+            work.\n
+            5) Long-press the power button. The power button menu, which usually shows the power-off
+            button, etc., isn\'t shown.\n
+            6) Press the power button to turn off the screen, and press it again to turn the screen
+            back on. Lock screen shouldn\'t be shown.\n
+            7) The assist gesture isn\'t available.\n\n
+            Mark the test as \'pass\' only if ALL of the above requirements are met.
+    </string>
+    <string name="device_owner_lock_task_ui_notifications_test">Enable notifications</string>
+    <string name="device_owner_lock_task_ui_notifications_test_info">
+            Press the button below to enable notifications. Observe the notification icons on the
+            status bar are now enabled and the Home button is shown. The status bar can also be
+            expanded to show the notifications. However, all Settings UI should remain invisible,
+            including Quick Settings and any link to the Settings app.\n\n
+            The rest of the UI restrictions should still apply:\n
+            1) System info area is still hidden on the status bar.\n
+            2) The software Overview button is hidden and the Overview gesture (swipe-up) does not
+            work.\n
+            3) Holding the Home button and swiping to the right does not bring up other tasks.\n
+            4) Long-press the power button. The power button menu, which usually shows the power-off
+            button, etc., isn\'t shown.\n
+            5) Press the power button to turn off the screen, and press it again to turn the screen
+            back on. Lock screen shouldn\'t be shown.\n
+            6) The assist gesture isn\'t available.\n\n
+            Mark the test as \'pass\' only if ALL of the above requirements are met.
+    </string>
+    <string name="device_owner_lock_task_ui_home_test">Enable Home button</string>
+    <string name="device_owner_lock_task_ui_home_test_info">
+            Press the button below to enable the Home button. Observe the Home button is now
+            enabled.\n\n
+            The rest of the UI restrictions should still apply:\n
+            1) Nothing is shown in the status bar, including notification icons, connectivity icons,
+            battery status, clock, etc.\n
+            2) The status bar can\'t be expanded. That is, the \'swipe-down\' gesture doesn\'t work
+            for the status bar.\n
+            3) The software Overview button is hidden and the Overview gesture (swipe-up) does not
+            work.\n
+            4) Holding the Home button and swiping to the right does not bring up other tasks.\n
+            5) Long-press the power button. The power button menu, which usually shows the power-off
+            button, etc., isn\'t shown.\n
+            6) Press the power button to turn off the screen, and press it again to turn the screen
+            back on. Lock screen shouldn\'t be shown.\n
+            7) The assist gesture isn\'t available.\n\n
+            Mark the test as \'pass\' only if ALL of the above requirements are met.
+    </string>
+    <string name="device_owner_lock_task_ui_recents_test">Enable Overview button</string>
+    <string name="device_owner_lock_task_ui_recents_test_info">
+            Press the button below to enable the Overview button. Observe the Home button is now
+            enabled. Press the Overview button or perform the Overview gesture
+            (swipe up) and verify the Overview view can be opened.\n\n
+            The rest of the UI restrictions should still apply:\n
+            1) Nothing is shown in the status bar, including notification icons, connectivity icons,
+            battery status, clock, etc.\n
+            2) The status bar can\'t be expanded. That is, the \'swipe-down\' gesture doesn\'t work
+            for the status bar.\n
+            3) Long-press the power button. The power button menu, which usually shows the power-off
+            button, etc., isn\'t shown.\n
+            4) Press the power button to turn off the screen, and press it again to turn the screen
+            back on. Lock screen shouldn\'t be shown.\n
+            5) The assist gesture isn\'t available.\n\n
+            Mark the test as \'pass\' only if ALL of the above requirements are met.
+    </string>
+    <string name="device_owner_lock_task_ui_global_actions_test">Enable global actions</string>
+    <string name="device_owner_lock_task_ui_global_actions_test_info">
+            Press the button below to enable global actions (a.k.a. power button menu). Long-press
+            the power button and verify a menu containing power-off and restart buttons is shown.
+            This menu can\'t contain any UI that allows the user to change system settings (such as
+            airplane mode switch) or access the Settings app.\n\n
+            The rest of the UI restrictions should still apply:\n
+            1) Nothing is shown in the status bar, including notification icons, connectivity icons,
+            battery status, clock, etc.\n
+            2) The status bar can\'t be expanded. That is, the \'swipe-down\' gesture doesn\'t work
+            for the status bar.\n
+            3) The software Home button is hidden.\n
+            4) The software Overview button is hidden and the Overview gesture (swipe-up) does not
+            work.\n
+            5) Press the power button to turn off the screen, and press it again to turn the screen
+            back on. Lock screen shouldn\'t be shown.\n
+            6) The assist gesture isn\'t available.\n\n
+            Mark the test as \'pass\' only if ALL of the above requirements are met.
+    </string>
+    <string name="device_owner_lock_task_ui_keyguard_test">Enable keyguard</string>
+    <string name="device_owner_lock_task_ui_keyguard_test_info">
+            Press the button below to enable keyguard. Press the power button to turn off the screen
+            and press it again to turn the screen back on. Verify that the lock screen is shown.\n\n
+            The rest of the UI restrictions should still apply, both on the lock screen and after
+            the lock screen is dismissed:\n
+            1) Nothing is shown in the status bar, including notification icons, connectivity icons,
+            battery status, clock, etc.\n
+            2) The status bar can\'t be expanded. That is, the \'swipe-down\' gesture doesn\'t work
+            for the status bar, even on the lock screen.\n
+            3) The software Home button is hidden.\n
+            4) The software Overview button is hidden and the Overview gesture (swipe-up) does not
+            work.\n
+            5) Long-press the power button. The power button menu, which usually shows the power-off
+            button, etc., isn\'t shown.\n
+            6) The assist gesture isn\'t available.\n\n
+            Mark the test as \'pass\' only if ALL of the above requirements are met.
+    </string>
+    <string name="device_owner_lock_task_ui_stop_lock_task_test">Stop LockTask mode</string>
+    <string name="device_owner_lock_task_ui_stop_lock_task_test_info">
+            Press the button below to exit LockTask mode.\n\n
+            Observe that the UI has returned to the normal, unrestricted state, and is no longer
+            subject to any LockTask restriction.\n\n
+            Mark the test as \'pass\' or \'fail\' accordingly.
+    </string>
     <string name="device_owner_lockscreen_secure">Please remove lockscreen password</string>
     <string name="device_profile_owner_permission_lockdown_test">Permissions lockdown</string>
     <string name="device_profile_owner_permission_lockdown_test_instructions">
@@ -2702,8 +2990,8 @@
     <string name="device_owner_disallow_usb_file_transfer_test_info">
             Please press below button to set the \"disallow USB file transfer\" restriction.\n
             If a USB notification appears, open the notification and check that the
-            \"Transfer files (MTP)\" and \"Transfer photos (PTP)\" cannot be selected and trigger a
-            support message when trying to select them.\n
+            \"Transfer files (MTP)\" and \"Transfer photos (PTP)\" options either are not displayed,
+            or they trigger a support message when trying to select them.\n
             Check if you can mount the device as a USB drive on your desktop computer. The test is
             successful if you cannot mount the device, and files from your phone cannot be
             downloaded through USB.\n
@@ -2765,6 +3053,16 @@
         \n
         Use the Back button to return to this page.
     </string>
+    <string name="device_owner_disallow_ambient_display">Disallow ambient display</string>
+    <string name="device_owner_disallow_ambient_display_info">
+        Please press the Set restriction button to set the user restriction.
+        Then press Go to open the Display page in Settings.
+        Mark this test as passed if one of the following:\n\n
+        - There is no ambient display setting in Display Settings.\n
+        - Ambient display setting is disabled with an info icon on it. Clicking on it triggers a support dialog.\n
+        \n
+        Use the Back button to return to this page.
+    </string>
     <string name="device_owner_disallow_data_roaming">Disallow data roaming</string>
     <string name="device_owner_disallow_data_roaming_info">
         Device should have a sim card to perform this test.
@@ -2948,7 +3246,7 @@
         3. Verify that performing the following action will trigger a support dialog:\n
            <xliff:g id="user_action" example="Adding an account">%2$s</xliff:g>.\n
         4. Verify that the support dialog displays the short support message set earlier.\n
-        5. Verify that clicking the "More details" link will redirect to Device administrators
+        5. Verify that clicking the "Learn more" link will redirect to Device administrators
            page in Settings app which displays the long support message set earlier.\n
     </string>
     <string name="user_restriction_set_step">
@@ -2958,10 +3256,20 @@
     <string name="disallow_add_user_action">Adding a new user</string>
     <string name="disallow_adjust_volume">Disallow adjust volume</string>
     <string name="disallow_adjust_volume_action">Adjusting the volume</string>
+    <string name="disallow_config_date_time">Disallow config date and time settings</string>
+    <string name="disallow_config_date_time_action">Configuring auto time, time, auto date or date</string>
+    <string name="disallow_config_location">Disallow config location</string>
+    <string name="disallow_config_location_action">Enabling or disabling location in settings or quick settings</string>
+    <string name="disallow_airplane_mode">Disallow airplane mode</string>
+    <string name="disallow_airplane_mode_action">Toggling airplane mode switch bar or changing airplane mode state in quick settings</string>
+    <string name="disallow_config_screen_timeout">Disallow config sleep options settings</string>
+    <string name="disallow_config_screen_timeout_action">Configuring sleep options in Display or Battery page.</string>
+    <string name="disallow_config_brightness">Disallow config brightness settings</string>
+    <string name="disallow_config_brightness_action">Configuring brightness level or adaptive brightness in Display or Battery page, or toggling brightness slider in quick settings</string>
     <string name="disallow_apps_control">Disallow controlling apps</string>
     <string name="disallow_apps_control_action">DISABLE/UNINSTALL/FORCE STOP-ing any app in the managed device/profile other than CtsVerifier</string>
     <string name="disallow_config_cell_broadcasts">Disallow config cell broadcasts</string>
-    <string name="disallow_config_cell_broadcasts_action">Configuring cell broadcasts</string>
+    <string name="disallow_config_cell_broadcasts_action">Configuring emergency alerts(cell broadcasts)</string>
     <string name="disallow_config_credentials">Disallow config credentials</string>
     <string name="disallow_config_credentials_action">Configuring user credentials</string>
     <string name="disallow_config_mobile_networks">Disallow config mobile networks</string>
@@ -2988,13 +3296,39 @@
     <string name="disallow_outgoing_beam">Disallow outgoing beam</string>
     <string name="disallow_outgoing_beam_action">Switching on android beam</string>
     <string name="disallow_remove_user">Disallow remove user</string>
-    <string name="disallow_remove_user_action">Removing other users (please create a user and attempt to remove it to verify)</string>
+    <string name="device_owner_disallow_remove_user_info">
+        Please press \'Create uninitialized user\' to create a user that is not set up. Then press the
+        \'Set restriction\' button to set the user restriction. Then press \'Go\' to open \'Settings\',
+        and manually find and open \'Multiple users\' setting. \n\n
+
+        Mark this test as passed if:\n\n
+        - The uninitialized user cannot be removed.\n
+        - \'Remove user\' option is disabled with an info icon on it. Clicking on it triggers a support dialog.\n\n
+
+        Use the Back button to return to this page.
+    </string>
+    <string name="managed_user_disallow_remove_user_info">
+        Please press the \'Set restriction\' button to set the user restriction.
+        Then press \'Go\' to open \'Settings\', and manually find and open \'Multiple users\' setting. \n\n
+
+        Mark this test as passed if one of the following conditions is met:\n\n
+        - \'Remove user\' option is disabled with an info icon on it. Clicking on it triggers a support dialog.\n
+        - \'Remove user\' option cannot be found.\n \n
+
+        Use the Back button to return to this page.
+    </string>
+    <string name="device_owner_disallow_remove_user_create_user">Create uninitialized user</string>
     <string name="disallow_remove_managed_profile">Disallow remove managed profile</string>
     <string name="disallow_remove_managed_profile_action">Removing the work profile. It shouldn\'t be possible neither from the Accounts screen nor the Device Administrators screen (after selecting the Device Administrator that corresponds to the badged version of \"CTS Verifier\")</string>
     <string name="disallow_share_location">Disallow share location</string>
     <string name="disallow_share_location_action">Turning on location sharing</string>
     <string name="disallow_uninstall_apps">Disallow uninstall apps</string>
-    <string name="disallow_uninstall_apps_action">Uninstalling applications other CtsVerifier</string>
+    <string name="disallow_uninstall_apps_action">
+        a. If testing in work profile: uninstall applications from the work profile (badged applications) other than CtsVerifier and system apps. \n
+        b. Otherwise: uninstall applications other than CtsVerifier and system apps
+    </string>
+    <string name="disallow_unified_challenge">Disallow unified challenge</string>
+    <string name="disallow_unified_challenge_action">Setting one lock for both personal and work profiles. IMPORTANT: Separate work lock should be set prior to this test in Set work lock test</string>
     <string name="disallow_keyguard_unredacted_notifications">Disallow lockscreen unredacted notification</string>
     <string name="disallow_keyguard_unredacted_notifications_set_step">Disallow unredacted notifications when device is locked by turning on the switch below</string>
     <string name="disallow_keyguard_unredacted_notifications_action">Selecting show all notification content when device is locked</string>
@@ -3354,6 +3688,69 @@
     <string name="comp_provision_profile_dialog_title">Provision work profile</string>
     <string name="comp_provision_profile_dialog_text">Press the OK button to start the managed provisioning flow, and complete the flow to create a work profile</string>
 
+    <string name="managed_user_test">Managed User</string>
+    <string name="managed_user_positive_tests">Managed User positive tests</string>
+    <string name="managed_user_positive_tests_instructions">
+        The positive managed user tests verify policies on a managed user created by a device owner.
+        \n
+        Press the Go button to create a managed user; you will be switched to the managed user
+        automatically. Dismiss the keyguard and a \'Managed User Tests\' screen should launch.\n
+        Follow the test instructions and press \'pass\' or \'fail\' to return to this screen.\n
+    </string>
+    <string name="managed_user_positive_tests_info">
+        The positive managed user tests verify policies on a managed user created by a device owner.
+        Proceed to the test cases, then press \'pass\' or \'fail\' to finish this test.
+    </string>
+    <string name="managed_user_positive_category">Managed User Tests</string>
+    <string name="managed_user_check_managed_user_test">Check affiliated profile owner</string>
+    <string name="managed_user_incorrect_managed_user">Missing or incorrect affiliated profile owner: CTSVerifier is not affilaited PO!</string>
+
+    <string name="device_owner_disallow_user_switch">Disallow user switch</string>
+    <string name="device_owner_disallow_user_switch_info">
+        Press \'Create uninitialized user\' to create a user that is not set up.
+        Then press the \'Set restriction\' button to set the user restriction.
+        Then press Go to open Settings, and manually find and open the user settings section.
+        Confirm that:\n
+        \n
+        - Selecting the uninitialized user does not trigger a user switch.\n
+        \n
+        In addition, if quick settings is available, confirm that the user switcher is hidden or
+        disabled.
+        Use the Back button to return to this page.
+    </string>
+    <string name="device_owner_disallow_user_switch_create_user">Create uninitialized user</string>
+
+    <string name="device_owner_user_switcher_message">User switcher message</string>
+    <string name="device_owner_user_switcher_message_info">
+        1. Please press the \'With user switcher message\' button to set the user switcher message.
+        You will then be automatically switched to a secondary user. If a user switcher dialog shows
+        up, it should read \'Start user session\'. Wait until you are automatically switched back to
+        the primary user; if a user switcher dialog shows up, it should read \'End user session\'.
+
+        \n
+        2. Please press the \'Without user switcher message\' button to clear the user switcher
+        message. You will then be automatically switched to a secondary user. If a user switcher
+        dialog shows up, it should read \'Switching to managed user\'. Wait until you are
+        automatically switched back to the primary user; if a user switcher dialog shows up, it should read
+        \'Switching to (name of primary user)\'.
+    </string>
+    <string name="device_owner_with_user_switcher_message">With user switcher message</string>
+    <string name="device_owner_without_user_switcher_message">Without user switcher message</string>
+
+    <string name="device_owner_enable_logout">Logout</string>
+    <string name="device_owner_enable_logout_info">
+        Please press the Go button to enable logout. You will then be switched to a newly created
+        user.
+        Look for a way to log out the current user without unlocking the lock screen. The control is
+        usually named \'End session\'.\n
+        The location may vary depending on the manufacturer; typical locations are:\n
+        - In power button menu by long pressing power button.\n
+        - On the lock screen.\n
+        \n
+        After successfully logging out and being switched back to the primary user, confirm that the
+        logout control is not available in the primary user.
+    </string>
+
     <!-- Strings for JobScheduler Tests -->
     <string name="js_test_description">This test is mostly automated, but requires some user interaction. You can pass this test once the list items below are checked.</string>
 
diff --git a/apps/CtsVerifier/res/xml/device_admin_byod.xml b/apps/CtsVerifier/res/xml/device_admin_byod.xml
index b1ba372..2d33459 100644
--- a/apps/CtsVerifier/res/xml/device_admin_byod.xml
+++ b/apps/CtsVerifier/res/xml/device_admin_byod.xml
@@ -16,6 +16,7 @@
 
 <!-- BEGIN_INCLUDE(meta_data) -->
 <device-admin xmlns:android="http://schemas.android.com/apk/res/android">
+    <support-transfer-ownership/>
     <uses-policies>
         <limit-password />
         <watch-login />
diff --git a/apps/CtsVerifier/res/xml/filepaths.xml b/apps/CtsVerifier/res/xml/filepaths.xml
index 2d555a2..9993951 100644
--- a/apps/CtsVerifier/res/xml/filepaths.xml
+++ b/apps/CtsVerifier/res/xml/filepaths.xml
@@ -1,3 +1,4 @@
 <paths xmlns:android="http://schemas.android.com/apk/res/android">
     <files-path path="images/" name="images" />
+    <files-path path="debug" name="debug/" />
 </paths>
diff --git a/apps/CtsVerifier/res/xml/network_security_config.xml b/apps/CtsVerifier/res/xml/network_security_config.xml
new file mode 100644
index 0000000..c15c09c
--- /dev/null
+++ b/apps/CtsVerifier/res/xml/network_security_config.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<network-security-config>
+    <base-config cleartextTrafficPermitted="true"/>
+</network-security-config>
diff --git a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_median.xml b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_median.xml
index 9c6de77..dd18236 100644
--- a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_median.xml
+++ b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_median.xml
@@ -3,4 +3,4 @@
         linePaint.strokeWidth="3dp"
         linePaint.color="#AA0000"
         vertexPaint.color="#770000"
-        fillPaint.color="#00000000" />
+        fillPaint.color="#770000" />
diff --git a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_noise.xml b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_noise.xml
index 8fb236e..0f27503 100644
--- a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_noise.xml
+++ b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_noise.xml
@@ -3,4 +3,4 @@
         linePaint.strokeWidth="2dp"
         linePaint.color="#777777"
         vertexPaint.color="777777"
-        fillPaint.color="#00000000" />
+        fillPaint.color="#770000" />
diff --git a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_pass.xml b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_pass.xml
index 9a6c29a..011f20b 100644
--- a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_pass.xml
+++ b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_pass.xml
@@ -3,4 +3,4 @@
         linePaint.strokeWidth="2dp"
         linePaint.color="#007700"
         vertexPaint.color="#007700"
-        fillPaint.color="#00000000" />
+        fillPaint.color="#880000" />
diff --git a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_trials.xml b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_trials.xml
index 3f9ffc2..ce09dfb 100644
--- a/apps/CtsVerifier/res/xml/ultrasound_line_formatter_trials.xml
+++ b/apps/CtsVerifier/res/xml/ultrasound_line_formatter_trials.xml
@@ -1,6 +1,7 @@
 <?xml version="1.0" encoding="utf-8"?>
 <config
-        linePaint.strokeWidth="1dp"
-        linePaint.color="#AAAAAA"
-        vertexPaint.color="#777777"
-        fillPaint.color="#00000000" />
+        linePaint.strokeWidth="3dp"
+        linePaint.color="#00AA00"
+        vertexPaint.color="#007700"
+        fillPaint.color="#00ff00"
+        pointLabelFormatter.textPaint.color="#FFFFFF"/>
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/AbstractTestListActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/AbstractTestListActivity.java
index 3132219..70a9593 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/AbstractTestListActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/AbstractTestListActivity.java
@@ -21,7 +21,9 @@
 import android.app.ListActivity;
 import android.content.Intent;
 import android.content.res.Configuration;
+import android.graphics.Rect;
 import android.os.Bundle;
+import android.view.MotionEvent;
 import android.view.View;
 import android.view.Window;
 import android.widget.ListView;
@@ -29,13 +31,26 @@
 /** {@link ListActivity} that displays a list of manual tests. */
 public abstract class AbstractTestListActivity extends ListActivity {
     private static final int LAUNCH_TEST_REQUEST_CODE = 9001;
+    // An invalid value, smaller than any valid on-screen coordinate.
+    private static final float DEFAULT_CLICKED_COORDINATE = -1;
 
     protected TestListAdapter mAdapter;
+    // Start time of test case.
+    protected long mStartTime;
+    // End time of test case.
+    protected long mEndTime;
+    // X-axis of clicked coordinate when entering a test case.
+    protected float mCoordinateX;
+    // Y-axis of clicked coordinate when entering a test case.
+    protected float mCoordinateY;
+    // Whether test case was executed through automation.
+    protected boolean mIsAutomated;
 
     protected void setTestListAdapter(TestListAdapter adapter) {
         mAdapter = adapter;
         setListAdapter(mAdapter);
         mAdapter.loadTestResults();
+        setOnTouchListenerToListView();
     }
 
     private Intent getIntent(int position) {
@@ -73,15 +88,33 @@
 
     protected void handleLaunchTestResult(int resultCode, Intent data) {
         if (resultCode == RESULT_OK) {
+            // If subtest didn't set end time, set current time
+            if (mEndTime == 0) {
+                mEndTime = System.currentTimeMillis();
+            }
             TestResult testResult = TestResult.fromActivityResult(resultCode, data);
+            testResult.getHistoryCollection().add(
+                testResult.getName(), mStartTime, mEndTime, mIsAutomated);
             mAdapter.setTestResult(testResult);
         }
+        // Reset end time to avoid keeping same end time in retry.
+        mEndTime = 0;
+        // Reset mIsAutomated flag to false
+        mIsAutomated = false;
+        // Reset clicked coordinate.
+        mCoordinateX = DEFAULT_CLICKED_COORDINATE;
+        mCoordinateY = DEFAULT_CLICKED_COORDINATE;
     }
 
     /** Launch the activity when its {@link ListView} item is clicked. */
     @Override
     protected final void onListItemClick(ListView listView, View view, int position, long id) {
         super.onListItemClick(listView, view, position, id);
+        mStartTime = System.currentTimeMillis();
+        // Check whether the clicked coordinate matches the center of the clicked view.
+        Rect rect = new Rect();
+        view.getGlobalVisibleRect(rect);
+        mIsAutomated = (mCoordinateX == rect.centerX()) && (mCoordinateY == rect.centerY());
         handleItemClick(listView, view, position, id);
     }
 
@@ -90,4 +123,23 @@
         Intent intent = getIntent(position);
         startActivityForResult(intent, LAUNCH_TEST_REQUEST_CODE);
     }
+
+    /** Sets an OnTouchListener on the ListView to capture the clicked coordinate. */
+    protected void setOnTouchListenerToListView() {
+        getListView().setOnTouchListener(null);
+        getListView().setOnTouchListener(new View.OnTouchListener(){
+            @Override
+            public boolean onTouch(View v, MotionEvent event) {
+                if (event.getAction() == MotionEvent.ACTION_UP) {
+                    mCoordinateX = event.getRawX();
+                    mCoordinateY = event.getRawY();
+                } else {
+                    // Reset clicked coordinate.
+                    mCoordinateX = DEFAULT_CLICKED_COORDINATE;
+                    mCoordinateY = DEFAULT_CLICKED_COORDINATE;
+                }
+                return false;
+            }
+        });
+    }
 }
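
The automation check added above rests on a simple observation: an injected tap (for example from an instrumentation driver) lands exactly on the centre of the clicked row, while a human touch almost never does. A minimal plain-Java sketch of that heuristic, with illustrative names that are not part of the CTS Verifier sources:

final class AutomationHeuristicSketch {
    // Sentinel meaning "no touch recorded", mirroring DEFAULT_CLICKED_COORDINATE above.
    static final float NO_COORDINATE = -1f;

    // True only when the recorded ACTION_UP coordinates sit exactly on the row centre.
    static boolean isAutomatedClick(float touchX, float touchY, float centerX, float centerY) {
        return touchX != NO_COORDINATE && touchX == centerX && touchY == centerY;
    }

    public static void main(String[] args) {
        System.out.println(isAutomatedClick(540f, 300f, 540f, 300f)); // true: injected tap
        System.out.println(isAutomatedClick(537f, 303f, 540f, 300f)); // false: human touch
    }
}
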
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/DialogTestListActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/DialogTestListActivity.java
index aa6eaba..bed5a77 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/DialogTestListActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/DialogTestListActivity.java
@@ -224,7 +224,7 @@
         // Bundle result in an intent to feed into handleLaunchTestResult
         Intent resultIntent = new Intent();
         TestResult.addResultData(resultIntent, result, test.testName, /* testDetails */ null,
-                /* reportLog */ null);
+                /* reportLog */ null, null);
         handleLaunchTestResult(RESULT_OK, resultIntent);
         getListView().smoothScrollToPosition(mCurrentTestPosition + 1);
     }
@@ -233,7 +233,7 @@
         // Bundle result in an intent to feed into handleLaunchTestResult
         Intent resultIntent = new Intent();
         TestResult.addResultData(resultIntent, result, testName, /* testDetails */ null,
-                /* reportLog */ null);
+                /* reportLog */ null, null);
         handleLaunchTestResult(RESULT_OK, resultIntent);
         getListView().smoothScrollToPosition(mCurrentTestPosition + 1);
     }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java b/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java
index 4a8004a..7776d27 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java
@@ -36,6 +36,10 @@
 import android.widget.ImageButton;
 import android.widget.Toast;
 
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
 /**
  * {@link Activity}s to handle clicks to the pass and fail buttons of the pass fail buttons layout.
  *
@@ -99,14 +103,21 @@
 
         /** @return A {@link ReportLog} that is used to record test metric data. */
         ReportLog getReportLog();
+
+        /**
+         * @return A {@link TestResultHistoryCollection} that is used to record test execution time.
+         */
+        TestResultHistoryCollection getHistoryCollection();
     }
 
     public static class Activity extends android.app.Activity implements PassFailActivity {
         private WakeLock mWakeLock;
         private final ReportLog reportLog;
+        private final TestResultHistoryCollection mHistoryCollection;
 
         public Activity() {
            this.reportLog = new CtsVerifierReportLog();
+           this.mHistoryCollection = new TestResultHistoryCollection();
         }
 
         @Override
@@ -160,19 +171,25 @@
         @Override
         public void setTestResultAndFinish(boolean passed) {
             PassFailButtons.setTestResultAndFinishHelper(
-                    this, getTestId(), getTestDetails(), passed, getReportLog());
+                    this, getTestId(), getTestDetails(), passed, getReportLog(),
+                    getHistoryCollection());
         }
 
         @Override
         public ReportLog getReportLog() { return reportLog; }
+
+        @Override
+        public TestResultHistoryCollection getHistoryCollection() { return mHistoryCollection; }
     }
 
     public static class ListActivity extends android.app.ListActivity implements PassFailActivity {
 
         private final ReportLog reportLog;
+        private final TestResultHistoryCollection mHistoryCollection;
 
         public ListActivity() {
             this.reportLog = new CtsVerifierReportLog();
+            this.mHistoryCollection = new TestResultHistoryCollection();
         }
 
         @Override
@@ -208,11 +225,15 @@
         @Override
         public void setTestResultAndFinish(boolean passed) {
             PassFailButtons.setTestResultAndFinishHelper(
-                    this, getTestId(), getTestDetails(), passed, getReportLog());
+                    this, getTestId(), getTestDetails(), passed, getReportLog(),
+                    getHistoryCollection());
         }
 
         @Override
         public ReportLog getReportLog() { return reportLog; }
+
+        @Override
+        public TestResultHistoryCollection getHistoryCollection() { return mHistoryCollection; }
     }
 
     public static class TestListActivity extends AbstractTestListActivity
@@ -257,12 +278,27 @@
         @Override
         public void setTestResultAndFinish(boolean passed) {
             PassFailButtons.setTestResultAndFinishHelper(
-                    this, getTestId(), getTestDetails(), passed, getReportLog());
+                    this, getTestId(), getTestDetails(), passed, getReportLog(),
+                    getHistoryCollection());
         }
 
         @Override
         public ReportLog getReportLog() { return reportLog; }
 
+        /**
+         * Get existing test history to aggregate.
+         */
+        @Override
+        public TestResultHistoryCollection getHistoryCollection() {
+            List<TestResultHistoryCollection> histories =
+                IntStream.range(0, mAdapter.getCount())
+                .mapToObj(mAdapter::getHistoryCollection)
+                .collect(Collectors.toList());
+            TestResultHistoryCollection historyCollection = new TestResultHistoryCollection();
+            historyCollection.merge(getTestId(), histories);
+            return historyCollection;
+        }
+
         public void updatePassButton() {
             getPassButton().setEnabled(mAdapter.allTestsPassed());
         }
@@ -274,7 +310,7 @@
             @Override
             public void onClick(View target) {
                 setTestResultAndFinish(activity, activity.getTestId(), activity.getTestDetails(),
-                        activity.getReportLog(), target);
+                        activity.getReportLog(), activity.getHistoryCollection(), target);
             }
         };
 
@@ -399,7 +435,8 @@
 
     /** Set the test result corresponding to the button clicked and finish the activity. */
     protected static void setTestResultAndFinish(android.app.Activity activity, String testId,
-            String testDetails, ReportLog reportLog, View target) {
+            String testDetails, ReportLog reportLog, TestResultHistoryCollection historyCollection,
+            View target) {
         boolean passed;
         if (target.getId() == R.id.pass_button) {
             passed = true;
@@ -409,16 +446,17 @@
             throw new IllegalArgumentException("Unknown id: " + target.getId());
         }
 
-        setTestResultAndFinishHelper(activity, testId, testDetails, passed, reportLog);
+        setTestResultAndFinishHelper(activity, testId, testDetails, passed, reportLog, historyCollection);
     }
 
     /** Set the test result and finish the activity. */
     protected static void setTestResultAndFinishHelper(android.app.Activity activity, String testId,
-            String testDetails, boolean passed, ReportLog reportLog) {
+            String testDetails, boolean passed, ReportLog reportLog,
+            TestResultHistoryCollection historyCollection) {
         if (passed) {
-            TestResult.setPassedResult(activity, testId, testDetails, reportLog);
+            TestResult.setPassedResult(activity, testId, testDetails, reportLog, historyCollection);
         } else {
-            TestResult.setFailedResult(activity, testId, testDetails, reportLog);
+            TestResult.setFailedResult(activity, testId, testDetails, reportLog, historyCollection);
         }
 
         activity.finish();
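
The getHistoryCollection() override above gathers one history per adapter row before merging them under the parent test id. A rough, dependency-free sketch of that stream pipeline, using plain strings as stand-ins for the real history objects:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

final class AdapterHistorySketch {
    // Stand-ins for the per-row history collections held by the adapter.
    private static final List<String> ROW_HISTORIES = List.of("subtest1-history", "subtest2-history");

    private static String historyCollectionAt(int position) {
        return ROW_HISTORIES.get(position);
    }

    public static void main(String[] args) {
        // Walk every adapter position and collect each row's history.
        List<String> histories = IntStream.range(0, ROW_HISTORIES.size())
                .mapToObj(AdapterHistorySketch::historyCollectionAt)
                .collect(Collectors.toList());
        // The real code then feeds this list into TestResultHistoryCollection.merge(testId, histories).
        System.out.println(histories); // [subtest1-history, subtest2-history]
    }
}
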
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/ReportExporter.java b/apps/CtsVerifier/src/com/android/cts/verifier/ReportExporter.java
index 1629e1b..29136a5 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/ReportExporter.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/ReportExporter.java
@@ -59,14 +59,14 @@
     private static final String SUITE_PLAN = "verifier";
     private static final String SUITE_BUILD = "0";
 
-    private static final long START_MS = System.currentTimeMillis();
-    private static final long END_MS = START_MS;
-
     private static final String REPORT_DIRECTORY = "verifierReports";
     private static final String ZIP_EXTENSION = ".zip";
 
     protected static final Logger LOG = Logger.getLogger(ReportExporter.class.getName());
 
+    private final long START_MS = System.currentTimeMillis();
+    private final long END_MS = START_MS;
+
     private final Context mContext;
     private final TestListAdapter mAdapter;
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestListAdapter.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestListAdapter.java
index d9ea84f..17efb22 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestListAdapter.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestListAdapter.java
@@ -17,6 +17,7 @@
 package com.android.cts.verifier;
 
 import com.android.compatibility.common.util.ReportLog;
+import com.android.compatibility.common.util.TestResultHistory;
 
 import android.content.ContentResolver;
 import android.content.Context;
@@ -74,6 +75,9 @@
     /** Map from test name to {@link ReportLog}. */
     private final Map<String, ReportLog> mReportLogs = new HashMap<String, ReportLog>();
 
+    /** Map from test name to {@link TestResultHistoryCollection}. */
+    private final Map<String, TestResultHistoryCollection> mHistories = new HashMap<>();
+
     private final LayoutInflater mLayoutInflater;
 
     /** {@link ListView} row that is either a test category header or a test. */
@@ -192,8 +196,14 @@
     }
 
     public void setTestResult(TestResult testResult) {
-        new SetTestResultTask(testResult.getName(), testResult.getResult(),
-                testResult.getDetails(), testResult.getReportLog()).execute();
+        String name = testResult.getName();
+
+        // Append existing history
+        TestResultHistoryCollection histories = testResult.getHistoryCollection();
+        histories.merge(null, mHistories.get(name));
+
+        new SetTestResultTask(name, testResult.getResult(),
+                testResult.getDetails(), testResult.getReportLog(), histories).execute();
     }
 
     class RefreshTestResultsTask extends AsyncTask<Void, Void, RefreshResult> {
@@ -214,6 +224,8 @@
             mTestDetails.putAll(result.mDetails);
             mReportLogs.clear();
             mReportLogs.putAll(result.mReportLogs);
+            mHistories.clear();
+            mHistories.putAll(result.mHistories);
             notifyDataSetChanged();
         }
     }
@@ -223,16 +235,19 @@
         Map<String, Integer> mResults;
         Map<String, String> mDetails;
         Map<String, ReportLog> mReportLogs;
+        Map<String, TestResultHistoryCollection> mHistories;
 
         RefreshResult(
                 List<TestListItem> items,
                 Map<String, Integer> results,
                 Map<String, String> details,
-                Map<String, ReportLog> reportLogs) {
+                Map<String, ReportLog> reportLogs,
+                Map<String, TestResultHistoryCollection> histories) {
             mItems = items;
             mResults = results;
             mDetails = details;
             mReportLogs = reportLogs;
+            mHistories = histories;
         }
     }
 
@@ -244,12 +259,14 @@
         TestResultsProvider.COLUMN_TEST_RESULT,
         TestResultsProvider.COLUMN_TEST_DETAILS,
         TestResultsProvider.COLUMN_TEST_METRICS,
+        TestResultsProvider.COLUMN_TEST_RESULT_HISTORY,
     };
 
     RefreshResult getRefreshResults(List<TestListItem> items) {
         Map<String, Integer> results = new HashMap<String, Integer>();
         Map<String, String> details = new HashMap<String, String>();
         Map<String, ReportLog> reportLogs = new HashMap<String, ReportLog>();
+        Map<String, TestResultHistoryCollection> histories = new HashMap<>();
         ContentResolver resolver = mContext.getContentResolver();
         Cursor cursor = null;
         try {
@@ -261,9 +278,12 @@
                     int testResult = cursor.getInt(2);
                     String testDetails = cursor.getString(3);
                     ReportLog reportLog = (ReportLog) deserialize(cursor.getBlob(4));
+                    TestResultHistoryCollection historyCollection =
+                        (TestResultHistoryCollection) deserialize(cursor.getBlob(5));
                     results.put(testName, testResult);
                     details.put(testName, testDetails);
                     reportLogs.put(testName, reportLog);
+                    histories.put(testName, historyCollection);
                 } while (cursor.moveToNext());
             }
         } finally {
@@ -271,7 +291,7 @@
                 cursor.close();
             }
         }
-        return new RefreshResult(items, results, details, reportLogs);
+        return new RefreshResult(items, results, details, reportLogs, histories);
     }
 
     class ClearTestResultsTask extends AsyncTask<Void, Void, Void> {
@@ -287,27 +307,28 @@
     class SetTestResultTask extends AsyncTask<Void, Void, Void> {
 
         private final String mTestName;
-
         private final int mResult;
-
         private final String mDetails;
-
         private final ReportLog mReportLog;
+        private final TestResultHistoryCollection mHistoryCollection;
 
         SetTestResultTask(
                 String testName,
                 int result,
                 String details,
-                ReportLog reportLog) {
+                ReportLog reportLog,
+                TestResultHistoryCollection historyCollection) {
             mTestName = testName;
             mResult = result;
             mDetails = details;
             mReportLog = reportLog;
+            mHistoryCollection = historyCollection;
         }
 
         @Override
         protected Void doInBackground(Void... params) {
-            TestResultsProvider.setTestResult(mContext, mTestName, mResult, mDetails, mReportLog);
+            TestResultsProvider.setTestResult(
+                mContext, mTestName, mResult, mDetails, mReportLog, mHistoryCollection);
             return null;
         }
     }
@@ -382,6 +403,19 @@
                 : null;
     }
 
+    /**
+     * Get test result histories.
+     *
+     * @param position The position of the test in the list.
+     * @return A {@link TestResultHistoryCollection} containing the test's result histories, or null if none is recorded.
+     */
+    public TestResultHistoryCollection getHistoryCollection(int position) {
+        TestListItem item = getItem(position);
+        return mHistories.containsKey(item.testName)
+            ? mHistories.get(item.testName)
+            : null;
+    }
+
     public boolean allTestsPassed() {
         for (TestListItem item : mRows) {
             if (item.isTest() && (!mTestResults.containsKey(item.testName)
@@ -451,7 +485,7 @@
         }
     }
 
-    private static Object deserialize(byte[] bytes) {
+    public static Object deserialize(byte[] bytes) {
         if (bytes == null || bytes.length == 0) {
             return null;
         }
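
The new history column is read back through the same deserialize(byte[]) helper shown above, assumed here to be plain Java serialization (as for the report-log blob). A self-contained sketch of that blob round trip, with illustrative helper names:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

final class BlobRoundTripSketch {
    // Serialize any Serializable value into the byte[] form stored in the BLOB column.
    static byte[] serialize(Serializable value) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(value);
        }
        return bytes.toByteArray();
    }

    // Restore the value, with the same null/empty guard as the deserialize() shown above.
    static Object deserialize(byte[] blob) throws IOException, ClassNotFoundException {
        if (blob == null || blob.length == 0) {
            return null;
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(blob))) {
            return in.readObject();
        }
    }

    public static void main(String[] args) throws Exception {
        byte[] blob = serialize("history-placeholder");
        System.out.println(deserialize(blob)); // prints: history-placeholder
    }
}
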
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestResult.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestResult.java
index c5d2d52..9f867d5 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestResult.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestResult.java
@@ -17,6 +17,7 @@
 package com.android.cts.verifier;
 
 import com.android.compatibility.common.util.ReportLog;
+import com.android.compatibility.common.util.TestResultHistory;
 
 import android.app.Activity;
 import android.content.Intent;
@@ -38,11 +39,13 @@
     private static final String TEST_RESULT = "result";
     private static final String TEST_DETAILS = "details";
     private static final String TEST_METRICS = "metrics";
+    private static final String TEST_HISTORY_COLLECTION = "historyCollection";
 
     private final String mName;
     private final int mResult;
     private final String mDetails;
     private final ReportLog mReportLog;
+    private final TestResultHistoryCollection mHistoryCollection;
 
     /** Sets the test activity's result to pass. */
     public static void setPassedResult(Activity activity, String testId, String testDetails) {
@@ -53,7 +56,14 @@
     public static void setPassedResult(Activity activity, String testId, String testDetails,
             ReportLog reportLog) {
         activity.setResult(Activity.RESULT_OK, createResult(activity, TEST_RESULT_PASSED, testId,
-                testDetails, reportLog));
+            testDetails, reportLog, null /*history*/));
+    }
+
+    /** Sets the test activity's result to pass including a test report log result and history. */
+    public static void setPassedResult(Activity activity, String testId, String testDetails,
+            ReportLog reportLog, TestResultHistoryCollection historyCollection) {
+        activity.setResult(Activity.RESULT_OK, createResult(activity, TEST_RESULT_PASSED, testId,
+                testDetails, reportLog, historyCollection));
     }
 
     /** Sets the test activity's result to failed. */
@@ -65,22 +75,30 @@
     public static void setFailedResult(Activity activity, String testId, String testDetails,
             ReportLog reportLog) {
         activity.setResult(Activity.RESULT_OK, createResult(activity, TEST_RESULT_FAILED, testId,
-                testDetails, reportLog));
+                testDetails, reportLog, null /*history*/));
     }
 
-    private static Intent createResult(Activity activity, int testResult, String testName,
-            String testDetails, ReportLog reportLog) {
+    /** Sets the test activity's result to failed including a test report log result and history. */
+    public static void setFailedResult(Activity activity, String testId, String testDetails,
+            ReportLog reportLog, TestResultHistoryCollection historyCollection) {
+        activity.setResult(Activity.RESULT_OK, createResult(activity, TEST_RESULT_FAILED, testId,
+            testDetails, reportLog, historyCollection));
+    }
+
+    public static Intent createResult(Activity activity, int testResult, String testName,
+            String testDetails, ReportLog reportLog, TestResultHistoryCollection historyCollection) {
         Intent data = new Intent(activity, activity.getClass());
-        addResultData(data, testResult, testName, testDetails, reportLog);
+        addResultData(data, testResult, testName, testDetails, reportLog, historyCollection);
         return data;
     }
 
     public static void addResultData(Intent intent, int testResult, String testName,
-            String testDetails, ReportLog reportLog) {
+            String testDetails, ReportLog reportLog, TestResultHistoryCollection historyCollection) {
         intent.putExtra(TEST_NAME, testName);
         intent.putExtra(TEST_RESULT, testResult);
         intent.putExtra(TEST_DETAILS, testDetails);
         intent.putExtra(TEST_METRICS, reportLog);
+        intent.putExtra(TEST_HISTORY_COLLECTION, historyCollection);
     }
 
     /**
@@ -92,15 +110,20 @@
         int result = data.getIntExtra(TEST_RESULT, TEST_RESULT_NOT_EXECUTED);
         String details = data.getStringExtra(TEST_DETAILS);
         ReportLog reportLog = (ReportLog) data.getSerializableExtra(TEST_METRICS);
-        return new TestResult(name, result, details, reportLog);
+        TestResultHistoryCollection historyCollection =
+            (TestResultHistoryCollection) data.getSerializableExtra(TEST_HISTORY_COLLECTION);
+        return new TestResult(name, result, details, reportLog, historyCollection);
     }
 
     private TestResult(
-            String name, int result, String details, ReportLog reportLog) {
+            String name, int result, String details, ReportLog reportLog,
+            TestResultHistoryCollection historyCollection) {
         this.mName = name;
         this.mResult = result;
         this.mDetails = details;
         this.mReportLog = reportLog;
+        this.mHistoryCollection =
+            historyCollection == null ? new TestResultHistoryCollection() : historyCollection;
     }
 
     /** Return the name of the test like "com.android.cts.verifier.foo.FooTest" */
@@ -122,4 +145,9 @@
     public ReportLog getReportLog() {
         return mReportLog;
     }
+
+    /** @return the {@link TestResultHistoryCollection} containing test history */
+    public TestResultHistoryCollection getHistoryCollection() {
+        return mHistoryCollection;
+    }
 }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestResultHistoryCollection.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestResultHistoryCollection.java
new file mode 100644
index 0000000..f92d233
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestResultHistoryCollection.java
@@ -0,0 +1,85 @@
+package com.android.cts.verifier;
+
+import com.android.compatibility.common.util.TestResultHistory;
+
+import java.io.Serializable;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class TestResultHistoryCollection implements Serializable {
+
+    private static final long serialVersionUID = 0L;
+    private final Set<TestResultHistory> mHistoryCollection = new HashSet<>();
+
+    /**
+     * Convert this collection to a set.
+     *
+     * @return A set of test result histories.
+     */
+    public Set<TestResultHistory> asSet() {
+        return mHistoryCollection;
+    }
+
+    /**
+     * Add a test result history with test name, start time, end time and isAutomated.
+     *
+     * @param test the test name.
+     * @param start start time of a test.
+     * @param end end time of a test.
+     * @param isAutomated whether test case was executed through automation.
+     */
+    public void add(String test, long start, long end, boolean isAutomated) {
+        Set<TestResultHistory.ExecutionRecord> executionRecords =
+                new HashSet<>();
+        executionRecords.add(new TestResultHistory.ExecutionRecord(start, end, isAutomated));
+        mHistoryCollection.add(new TestResultHistory(test, executionRecords));
+    }
+
+    /**
+     * Add test result histories for a test, given its name and a set of ExecutionRecords.
+     *
+     * @param test test name.
+     * @param executionRecords set of ExecutionRecords.
+     */
+    public void addAll(String test, Set<TestResultHistory.ExecutionRecord> executionRecords) {
+        TestResultHistory history = new TestResultHistory(test, executionRecords);
+        boolean match = false;
+        for (TestResultHistory resultHistory: mHistoryCollection) {
+            if (resultHistory.getTestName().equals(test)) {
+                resultHistory.getExecutionRecords().addAll(executionRecords);
+                match = true;
+                break;
+            }
+        }
+        if (!match) {
+            mHistoryCollection.add(history);
+        }
+    }
+
+    /**
+     * Merge the result histories of a test's sub-tests into this collection.
+     *
+     * @param prefix optional test name prefix to apply.
+     * @param resultHistoryCollection a set of test result histories.
+     */
+    public void merge(String prefix, TestResultHistoryCollection resultHistoryCollection) {
+        if (resultHistoryCollection != null) {
+            resultHistoryCollection.asSet().forEach(t -> addAll(
+                prefix != null
+                        ? prefix + ":" + t.getTestName()
+                        : t.getTestName(), t.getExecutionRecords()));
+        }
+    }
+
+    /**
+     * Merge a list of sub-test result history collections into this collection.
+     *
+     * @param prefix optional test name prefix to apply.
+     * @param resultHistories a list of test result history collections.
+     */
+    public void merge(String prefix, List<TestResultHistoryCollection> resultHistories) {
+        resultHistories.forEach(resultHistoryCollection -> merge(prefix, resultHistoryCollection));
+    }
+}
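
A compact sketch of the merge semantics implemented above, using a plain Map of name to {start, end} records as a stand-in for TestResultHistory: sub-test records are re-keyed with an optional parent prefix and appended to whatever is already stored under that name.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class HistoryMergeSketch {
    // name -> list of {start, end} pairs; a long[] stands in for an ExecutionRecord.
    private final Map<String, List<long[]>> records = new HashMap<>();

    void add(String test, long start, long end) {
        records.computeIfAbsent(test, k -> new ArrayList<>()).add(new long[] {start, end});
    }

    void merge(String prefix, HistoryMergeSketch other) {
        other.records.forEach((name, recs) -> {
            String key = (prefix != null) ? prefix + ":" + name : name;
            records.computeIfAbsent(key, k -> new ArrayList<>()).addAll(recs);
        });
    }

    public static void main(String[] args) {
        HistoryMergeSketch subTests = new HistoryMergeSketch();
        subTests.add("subtest", 1000L, 2000L);

        HistoryMergeSketch parent = new HistoryMergeSketch();
        parent.merge("com.example.ParentTest", subTests);
        System.out.println(parent.records.keySet()); // [com.example.ParentTest:subtest]
    }
}
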
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsProvider.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsProvider.java
index 64c04eb..bdf32fa 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsProvider.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsProvider.java
@@ -78,6 +78,9 @@
     /** ReportLog containing the test result metrics. */
     static final String COLUMN_TEST_METRICS = "testmetrics";
 
+    /** TestResultHistoryCollection containing the test run histories. */
+    static final String COLUMN_TEST_RESULT_HISTORY = "testresulthistory";
+
     private static final UriMatcher URI_MATCHER = new UriMatcher(UriMatcher.NO_MATCH);
     private static final int RESULTS_ALL = 1;
     private static final int RESULTS_ID = 2;
@@ -120,7 +123,8 @@
                     + COLUMN_TEST_RESULT + " INTEGER,"
                     + COLUMN_TEST_INFO_SEEN + " INTEGER DEFAULT 0,"
                     + COLUMN_TEST_DETAILS + " TEXT,"
-                    + COLUMN_TEST_METRICS + " BLOB);");
+                    + COLUMN_TEST_METRICS + " BLOB,"
+                    + COLUMN_TEST_RESULT_HISTORY + " BLOB);");
         }
 
         @Override
@@ -226,12 +230,13 @@
     }
 
     static void setTestResult(Context context, String testName, int testResult,
-            String testDetails, ReportLog reportLog) {
+            String testDetails, ReportLog reportLog, TestResultHistoryCollection historyCollection) {
         ContentValues values = new ContentValues(2);
         values.put(TestResultsProvider.COLUMN_TEST_RESULT, testResult);
         values.put(TestResultsProvider.COLUMN_TEST_NAME, testName);
         values.put(TestResultsProvider.COLUMN_TEST_DETAILS, testDetails);
         values.put(TestResultsProvider.COLUMN_TEST_METRICS, serialize(reportLog));
+        values.put(TestResultsProvider.COLUMN_TEST_RESULT_HISTORY, serialize(historyCollection));
 
         final Uri uri = getResultContentUri(context);
         ContentResolver resolver = context.getContentResolver();
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsReport.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsReport.java
index 8c779c5..d9d63c2 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsReport.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestResultsReport.java
@@ -29,6 +29,7 @@
 import com.android.compatibility.common.util.ITestResult;
 import com.android.compatibility.common.util.MetricsXmlSerializer;
 import com.android.compatibility.common.util.ReportLog;
+import com.android.compatibility.common.util.TestResultHistory;
 import com.android.compatibility.common.util.TestStatus;
 import com.android.cts.verifier.TestListAdapter.TestListItem;
 
@@ -38,9 +39,16 @@
 import java.io.IOException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Set;
 
 /**
  * Helper class for creating an {@code InvocationResult} for CTS result generation.
@@ -103,7 +111,7 @@
         DevicePropertyInfo devicePropertyInfo = new DevicePropertyInfo(Build.CPU_ABI,
                 Build.CPU_ABI2, abis, abis32, abis64, Build.BOARD, Build.BRAND, Build.DEVICE,
                 Build.FINGERPRINT, Build.ID, Build.MANUFACTURER, Build.MODEL, Build.PRODUCT,
-                referenceFingerprint, Build.SERIAL, Build.TAGS, Build.TYPE, versionBaseOs,
+                referenceFingerprint, Build.getSerial(), Build.TAGS, Build.TYPE, versionBaseOs,
                 Build.VERSION.RELEASE, Integer.toString(Build.VERSION.SDK_INT),
                 versionSecurityPatch, Build.VERSION.INCREMENTAL);
 
@@ -135,6 +143,27 @@
                 if (reportLog != null) {
                     currentTestResult.setReportLog(reportLog);
                 }
+
+                TestResultHistoryCollection historyCollection = mAdapter.getHistoryCollection(i);
+                if (historyCollection != null) {
+                    // Get non-terminal prefixes.
+                    Set<String> prefixes = new HashSet<>();
+                    for (TestResultHistory history: historyCollection.asSet()) {
+                        Arrays.stream(history.getTestName().split(":")).reduce(
+                            (total, current) -> { prefixes.add(total);
+                            return total + ":" + current;
+                        });
+                    }
+
+                    // Filter out non-leaf test histories.
+                    List<TestResultHistory> leafTestHistories = new ArrayList<TestResultHistory>();
+                    for (TestResultHistory history: historyCollection.asSet()) {
+                        if (!prefixes.contains(history.getTestName())) {
+                            leafTestHistories.add(history);
+                        }
+                    }
+                    currentTestResult.setTestResultHistories(leafTestHistories);
+                }
             }
         }
         moduleResult.setDone(true);
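
The prefix computation above is the densest part of this change, so here is a self-contained sketch of the same leaf-filtering idea: every proper ':'-separated prefix of a history name is collected as non-terminal, and only names that never act as a prefix of another entry are exported as leaf histories.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

final class LeafHistorySketch {
    public static void main(String[] args) {
        List<String> names = List.of("Parent", "Parent:Child", "Parent:Child:Grandchild");

        // Collect every non-terminal prefix, exactly as the reduce() above does.
        Set<String> prefixes = new HashSet<>();
        for (String name : names) {
            Arrays.stream(name.split(":")).reduce((total, current) -> {
                prefixes.add(total);
                return total + ":" + current;
            });
        }

        // Keep only names that are never a prefix of another entry.
        List<String> leaves = new ArrayList<>();
        for (String name : names) {
            if (!prefixes.contains(name)) {
                leaves.add(name);
            }
        }
        System.out.println(leaves); // [Parent:Child:Grandchild]
    }
}
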
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/admin/tapjacking/UsbTest.java b/apps/CtsVerifier/src/com/android/cts/verifier/admin/tapjacking/UsbTest.java
index 17319d6..6371857 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/admin/tapjacking/UsbTest.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/admin/tapjacking/UsbTest.java
@@ -17,6 +17,7 @@
 package com.android.cts.verifier.admin.tapjacking;
 
 import android.content.Intent;
+import android.content.res.Resources;
 import android.graphics.PixelFormat;
 import android.os.Bundle;
 import android.provider.Settings;
@@ -26,6 +27,7 @@
 import android.view.ViewGroup;
 import android.view.WindowManager;
 import android.widget.Button;
+import android.widget.TextView;
 import android.widget.Toast;
 
 import com.android.cts.verifier.PassFailButtons;
@@ -33,6 +35,7 @@
 
 public class UsbTest extends PassFailButtons.Activity {
     private View mOverlay;
+    private TextView mUsbTapjackingInstructions;
     private Button mTriggerOverlayButton;
 
     public static final String LOG_TAG = "UsbTest";
@@ -45,8 +48,12 @@
         setInfoResources(R.string.usb_tapjacking_test,
                 R.string.usb_tapjacking_test_info, -1);
 
+        String usbDebuggingComponent = getString(R.string.usb_tapjacking_usb_debugging_component);
+        mUsbTapjackingInstructions = findViewById(R.id.usb_tapjacking_instructions);
+        mUsbTapjackingInstructions.setText(
+                getString(R.string.usb_tapjacking_test_instructions, usbDebuggingComponent));
         //initialise the escalate button and set a listener
-        mTriggerOverlayButton = (Button) findViewById(R.id.tapjacking_btn);
+        mTriggerOverlayButton = findViewById(R.id.tapjacking_btn);
         mTriggerOverlayButton.setEnabled(true);
         mTriggerOverlayButton.setOnClickListener(new View.OnClickListener() {
             @Override
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java
index 1a9ffac..01802a4 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundSpeakerTestActivity.java
@@ -37,9 +37,12 @@
 import android.widget.TextView;
 import java.util.Arrays;
 
+import com.androidplot.xy.PointLabelFormatter;
+import com.androidplot.xy.LineAndPointFormatter;
 import com.androidplot.xy.SimpleXYSeries;
+import com.androidplot.xy.XYPlot;
 import com.androidplot.xy.XYSeries;
-import com.androidplot.xy.*;
+import com.androidplot.xy.XYStepMode;
 
 import com.android.compatibility.common.util.CddTest;
 
@@ -277,9 +280,9 @@
             Arrays.asList(powerWrap),
             "");
         LineAndPointFormatter seriesFormat = new LineAndPointFormatter();
+        seriesFormat.setPointLabelFormatter(new PointLabelFormatter());
         seriesFormat.configure(getApplicationContext(),
             R.xml.ultrasound_line_formatter_trials);
-        seriesFormat.setPointLabelFormatter(null);
         plot.addSeries(series, seriesFormat);
       }
 
@@ -294,9 +297,9 @@
           Arrays.asList(noiseDBWrap),
           "background noise");
       LineAndPointFormatter noiseSeriesFormat = new LineAndPointFormatter();
+      noiseSeriesFormat.setPointLabelFormatter(new PointLabelFormatter());
       noiseSeriesFormat.configure(getApplicationContext(),
           R.xml.ultrasound_line_formatter_noise);
-      noiseSeriesFormat.setPointLabelFormatter(null);
       plot.addSeries(noiseSeries, noiseSeriesFormat);
 
       double[] dB = wavAnalyzerTask.getDB();
@@ -310,9 +313,9 @@
           Arrays.asList(dBWrap),
           "median");
       LineAndPointFormatter seriesFormat = new LineAndPointFormatter();
+      seriesFormat.setPointLabelFormatter(new PointLabelFormatter());
       seriesFormat.configure(getApplicationContext(),
           R.xml.ultrasound_line_formatter_median);
-      seriesFormat.setPointLabelFormatter(null);
       plot.addSeries(series, seriesFormat);
 
       Double[] passX = new Double[] {Common.MIN_FREQUENCY_HZ, Common.MAX_FREQUENCY_HZ};
@@ -320,9 +323,9 @@
       XYSeries passSeries = new SimpleXYSeries(
           Arrays.asList(passX), Arrays.asList(passY), "passing");
       LineAndPointFormatter passSeriesFormat = new LineAndPointFormatter();
+      passSeriesFormat.setPointLabelFormatter(new PointLabelFormatter());
       passSeriesFormat.configure(getApplicationContext(),
           R.xml.ultrasound_line_formatter_pass);
-      passSeriesFormat.setPointLabelFormatter(null);
       plot.addSeries(passSeries, passSeriesFormat);
     }
   }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java
index 0276e60..9aebbc4 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/HifiUltrasoundTestActivity.java
@@ -37,9 +37,12 @@
 import android.widget.TextView;
 import java.util.Arrays;
 
+import com.androidplot.xy.PointLabelFormatter;
+import com.androidplot.xy.LineAndPointFormatter;
 import com.androidplot.xy.SimpleXYSeries;
+import com.androidplot.xy.XYPlot;
 import com.androidplot.xy.XYSeries;
-import com.androidplot.xy.*;
+import com.androidplot.xy.XYStepMode;
 
 public class HifiUltrasoundTestActivity extends PassFailButtons.Activity {
 
@@ -236,9 +239,9 @@
             Arrays.asList(powerWrap),
             "");
         LineAndPointFormatter seriesFormat = new LineAndPointFormatter();
+        seriesFormat.setPointLabelFormatter(new PointLabelFormatter());
         seriesFormat.configure(getApplicationContext(),
             R.xml.ultrasound_line_formatter_trials);
-        seriesFormat.setPointLabelFormatter(null);
         plot.addSeries(series, seriesFormat);
       }
 
@@ -253,9 +256,9 @@
           Arrays.asList(noiseDBWrap),
           "background noise");
       LineAndPointFormatter noiseSeriesFormat = new LineAndPointFormatter();
+      noiseSeriesFormat.setPointLabelFormatter(new PointLabelFormatter());
       noiseSeriesFormat.configure(getApplicationContext(),
           R.xml.ultrasound_line_formatter_noise);
-      noiseSeriesFormat.setPointLabelFormatter(null);
       plot.addSeries(noiseSeries, noiseSeriesFormat);
 
       double[] dB = wavAnalyzerTask.getDB();
@@ -269,9 +272,9 @@
           Arrays.asList(dBWrap),
           "median");
       LineAndPointFormatter seriesFormat = new LineAndPointFormatter();
+      seriesFormat.setPointLabelFormatter(new PointLabelFormatter());
       seriesFormat.configure(getApplicationContext(),
           R.xml.ultrasound_line_formatter_median);
-      seriesFormat.setPointLabelFormatter(null);
       plot.addSeries(series, seriesFormat);
 
       Double[] passX = new Double[] {Common.MIN_FREQUENCY_HZ, Common.MAX_FREQUENCY_HZ};
@@ -279,9 +282,9 @@
       XYSeries passSeries = new SimpleXYSeries(
           Arrays.asList(passX), Arrays.asList(passY), "passing");
       LineAndPointFormatter passSeriesFormat = new LineAndPointFormatter();
+      passSeriesFormat.setPointLabelFormatter(new PointLabelFormatter());
       passSeriesFormat.configure(getApplicationContext(),
           R.xml.ultrasound_line_formatter_pass);
-      passSeriesFormat.setPointLabelFormatter(null);
       plot.addSeries(passSeries, passSeriesFormat);
     }
   }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/RingerModeActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/RingerModeActivity.java
index b6e744e..fdb57fc 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/RingerModeActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/RingerModeActivity.java
@@ -113,20 +113,22 @@
             return tests;
         }
         tests.add(new SetModeAllTest());
-        tests.add(new SetModeAlarmsTest());
+        tests.add(new SetModePriorityTest());
         tests.add(new TestAccessRingerModeDndOn());
         tests.add(new TestVibrateNotificationDndOn());
         tests.add(new TestVibrateRingerDndOn());
         tests.add(new TestSetRingerModePolicyAccessDndOn());
-        tests.add(new TestVolumeDndAffectedStreamDndOn());
-        tests.add(new TestAdjustVolumeInAlarmsOnlyMode());
+        // TODO: Add a @TestApi method to query the VolumePolicy
+        //tests.add(new TestVolumeDndAffectedStreamDndOn());
+        tests.add(new TestAdjustVolumeInPriorityOnlyAllowAlarmsMediaMode());
 
         tests.add(new SetModeAllTest());
         tests.add(new TestAccessRingerMode());
         tests.add(new TestVibrateNotification());
         tests.add(new TestVibrateRinger());
         tests.add(new TestSetRingerModePolicyAccess());
-        tests.add(new TestVolumeDndAffectedStream());
+        // TODO: Add a @TestApi method to query the VolumePolicy
+        //tests.add(new TestVolumeDndAffectedStream());
         tests.add(new TestVolume());
         tests.add(new TestMuteStreams());
         tests.add(new EnableSoundEffects());
@@ -149,34 +151,41 @@
     }
 
     private void testStreamMuting(int stream) {
-        mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_MUTE, 0);
-        assertTrue("Muting stream " + stream + " failed.",
-                mAudioManager.isStreamMute(stream));
+        if (stream == AudioManager.STREAM_VOICE_CALL) {
+            // Voice call requires MODIFY_PHONE_STATE, so we should not be able to mute
+            mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_MUTE, 0);
+            assertFalse("Muting stream " + stream + " should require MODIFY_PHONE_STATE permission.",
+                    mAudioManager.isStreamMute(stream));
+        } else {
+            mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_MUTE, 0);
+            assertTrue("Muting stream " + stream + " failed.",
+                    mAudioManager.isStreamMute(stream));
 
-        mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_UNMUTE, 0);
-        assertFalse("Unmuting stream " + stream + " failed.",
-                mAudioManager.isStreamMute(stream));
+            mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_UNMUTE, 0);
+            assertFalse("Unmuting stream " + stream + " failed.",
+                    mAudioManager.isStreamMute(stream));
 
-        mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_TOGGLE_MUTE, 0);
-        assertTrue("Toggling mute on stream " + stream + " failed.",
-                mAudioManager.isStreamMute(stream));
+            mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_TOGGLE_MUTE, 0);
+            assertTrue("Toggling mute on stream " + stream + " failed.",
+                    mAudioManager.isStreamMute(stream));
 
-        mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_TOGGLE_MUTE, 0);
-        assertFalse("Toggling mute on stream " + stream + " failed.",
-                mAudioManager.isStreamMute(stream));
+            mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_TOGGLE_MUTE, 0);
+            assertFalse("Toggling mute on stream " + stream + " failed.",
+                    mAudioManager.isStreamMute(stream));
 
-        mAudioManager.setStreamMute(stream, true);
-        assertTrue("Muting stream " + stream + " using setStreamMute failed",
-                mAudioManager.isStreamMute(stream));
+            mAudioManager.setStreamMute(stream, true);
+            assertTrue("Muting stream " + stream + " using setStreamMute failed",
+                    mAudioManager.isStreamMute(stream));
 
-        // mute it three more times to verify the ref counting is gone.
-        mAudioManager.setStreamMute(stream, true);
-        mAudioManager.setStreamMute(stream, true);
-        mAudioManager.setStreamMute(stream, true);
+            // mute it three more times to verify the ref counting is gone.
+            mAudioManager.setStreamMute(stream, true);
+            mAudioManager.setStreamMute(stream, true);
+            mAudioManager.setStreamMute(stream, true);
 
-        mAudioManager.setStreamMute(stream, false);
-        assertFalse("Unmuting stream " + stream + " using setStreamMute failed.",
-                mAudioManager.isStreamMute(stream));
+            mAudioManager.setStreamMute(stream, false);
+            assertFalse("Unmuting stream " + stream + " using setStreamMute failed.",
+                    mAudioManager.isStreamMute(stream));
+        }
     }
 
     // Tests
@@ -216,10 +225,10 @@
         }
     }
 
-    protected class SetModeAlarmsTest extends InteractiveTestCase {
+    protected class SetModePriorityTest extends InteractiveTestCase {
         @Override
         protected View inflate(ViewGroup parent) {
-            return createRetryItem(parent, R.string.attention_filter_alarms);
+            return createRetryItem(parent, R.string.attention_filter_priority_mimic_alarms_only);
         }
 
         @Override
@@ -625,17 +634,12 @@
                 return;
             }
 
-            mAudioManager.setRingerMode(RINGER_MODE_VIBRATE);
             if (mHasVibrator) {
+                mAudioManager.setRingerMode(RINGER_MODE_VIBRATE);
                 if (RINGER_MODE_VIBRATE != mAudioManager.getRingerMode()) {
                     setFailed();
                     return;
                 }
-            } else {
-                if (RINGER_MODE_NORMAL != mAudioManager.getRingerMode()) {
-                    setFailed();
-                    return;
-                }
             }
             status = PASS;
         }
@@ -1077,29 +1081,35 @@
 
             int muteAffectedStreams = Settings.System.getInt(mContext.getContentResolver(),
                     Settings.System.MUTE_STREAMS_AFFECTED,
-                    // Same defaults as in AudioService. Should be kept in
-                    // sync.
-                    ((1 << AudioManager.STREAM_MUSIC) |
+                    // Same defaults as in AudioService. Should be kept in sync.
+                    (1 << STREAM_MUSIC) |
                             (1 << AudioManager.STREAM_RING) |
                             (1 << AudioManager.STREAM_NOTIFICATION) |
-                            (1 << AudioManager.STREAM_SYSTEM)));
+                            (1 << AudioManager.STREAM_SYSTEM) |
+                            (1 << AudioManager.STREAM_VOICE_CALL));
+
             for (int stream : streams) {
                 // ensure each stream is on and turned up.
-                mAudioManager.setStreamVolume(stream,
-                        mAudioManager.getStreamMaxVolume(stream),
-                        0);
+                mAudioManager.setStreamVolume(stream, mAudioManager.getStreamMaxVolume(stream), 0);
                 if (((1 << stream) & muteAffectedStreams) == 0) {
-                    mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_MUTE, 0);
-                    assertFalse("Stream " + stream + " should not be affected by mute.",
-                            mAudioManager.isStreamMute(stream));
-                    mAudioManager.setStreamMute(stream, true);
-                    assertFalse("Stream " + stream + " should not be affected by mute.",
-                            mAudioManager.isStreamMute(stream));
-                    mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_TOGGLE_MUTE,
-                            0);
-                    assertFalse("Stream " + stream + " should not be affected by mute.",
-                            mAudioManager.isStreamMute(stream));
-                    continue;
+                    if (stream == AudioManager.STREAM_VOICE_CALL) {
+                        // Voice call requires MODIFY_PHONE_STATE, so we should not be able to mute
+                        mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_MUTE, 0);
+                        assertTrue("Voice call stream (" + stream + ") should require MODIFY_PHONE_STATE "
+                                + "to mute.", mAudioManager.isStreamMute(stream));
+                    } else {
+                        mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_MUTE, 0);
+                        assertFalse("Stream " + stream + " should not be affected by mute.",
+                                mAudioManager.isStreamMute(stream));
+                        mAudioManager.setStreamMute(stream, true);
+                        assertFalse("Stream " + stream + " should not be affected by mute.",
+                                mAudioManager.isStreamMute(stream));
+                        mAudioManager.adjustStreamVolume(stream, AudioManager.ADJUST_TOGGLE_MUTE,
+                                0);
+                        assertFalse("Stream " + stream + " should not be affected by mute.",
+                                mAudioManager.isStreamMute(stream));
+                        continue;
+                    }
                 }
                 testStreamMuting(stream);
             }
@@ -1107,7 +1117,7 @@
         }
     }
 
-    protected class TestAdjustVolumeInAlarmsOnlyMode extends InteractiveTestCase {
+    protected class TestAdjustVolumeInPriorityOnlyAllowAlarmsMediaMode extends InteractiveTestCase {
         @Override
         protected View inflate(ViewGroup parent) {
             return createAutoItem(parent, R.string.test_volume_dnd_affected_stream);
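
The muteAffectedStreams hunk above keys each stream's handling off the Settings.System.MUTE_STREAMS_AFFECTED bitmask: a stream participates in mute handling only if its bit is set, and this change adds STREAM_VOICE_CALL to the defaults. A minimal plain-Java sketch of that mask check (the STREAM_* values below mirror the AudioManager constants; the class itself is illustrative only):

    public class MuteAffectedMaskSketch {
        // Values match AudioManager.STREAM_VOICE_CALL/SYSTEM/RING/MUSIC/NOTIFICATION.
        static final int STREAM_VOICE_CALL = 0, STREAM_SYSTEM = 1, STREAM_RING = 2,
                STREAM_MUSIC = 3, STREAM_NOTIFICATION = 5;

        public static void main(String[] args) {
            // Same default mask shape as the test: one bit per mute-affected stream.
            int muteAffectedStreams = (1 << STREAM_MUSIC)
                    | (1 << STREAM_RING)
                    | (1 << STREAM_NOTIFICATION)
                    | (1 << STREAM_SYSTEM)
                    | (1 << STREAM_VOICE_CALL);

            int[] streams = {STREAM_VOICE_CALL, STREAM_SYSTEM, STREAM_RING,
                    STREAM_MUSIC, STREAM_NOTIFICATION};
            for (int stream : streams) {
                boolean affected = ((1 << stream) & muteAffectedStreams) != 0;
                System.out.println("stream " + stream + " mute-affected: " + affected);
            }
        }
    }
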
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/USBAudioPeripheralButtonsActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/USBAudioPeripheralButtonsActivity.java
index 2149ed7..87b2149 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/USBAudioPeripheralButtonsActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/USBAudioPeripheralButtonsActivity.java
@@ -92,13 +92,11 @@
             if (match && mButtonAttributes.mHasBtnA != mHasBtnA) {
                 match = false;
             }
-            if (!interceptedVolume) {
-                if (match && mButtonAttributes.mHasBtnB != mHasBtnB) {
-                    match = false;
-                }
-                if (match && mButtonAttributes.mHasBtnC != mHasBtnC) {
-                    match = false;
-                }
+            if (match && mButtonAttributes.mHasBtnB != mHasBtnB && !interceptedVolume) {
+                match = false;
+            }
+            if (match && mButtonAttributes.mHasBtnC != mHasBtnC && !interceptedVolume) {
+                match = false;
             }
             Log.i(TAG, "match:" + match);
             getPassButton().setEnabled(match);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/AudioStringsHelper.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/AudioStringsHelper.java
index e9acacc..f9ffd86 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/AudioStringsHelper.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/AudioStringsHelper.java
@@ -16,7 +16,7 @@
 
 package com.android.cts.verifier.audio.peripheralprofile;
 
-import android.support.annotation.NonNull;
+import androidx.annotation.NonNull;
 
 public class AudioStringsHelper {
     // These correspond to encoding constants defined in AudioFormats.java
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/PeripheralProfile.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/PeripheralProfile.java
index a0cff31..eeff790 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/PeripheralProfile.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/PeripheralProfile.java
@@ -17,7 +17,7 @@
 package com.android.cts.verifier.audio.peripheralprofile;
 
 import android.media.AudioDeviceInfo;
-import android.support.annotation.NonNull;
+import androidx.annotation.NonNull;
 
 import com.android.cts.verifier.audio.peripheralprofile.ListsHelper;
 
@@ -122,7 +122,12 @@
         String[] strings = intList.split(",");
         int[] ints = new int[strings.length];
         for (int index = 0; index < strings.length; index++) {
-            ints[index] = Integer.parseInt(strings[index]);
+            try {
+                ints[index] = Integer.parseInt(strings[index]);
+            } catch (NumberFormatException ex) {
+                ints[index] = 0;
+            }
         }
         return ints;
     }
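
The PeripheralProfile change above makes the comma-separated attribute parser tolerant of malformed tokens by substituting 0 instead of letting NumberFormatException abort the profile load. A standalone sketch of the same idea (the trim() call is an extra convenience added here; the ProfileManager hunk below instead normalizes the XML strings, e.g. "1, 2" becomes "1,2"):

    import java.util.Arrays;

    public class IntListParseSketch {
        // A bad token becomes 0 rather than discarding the whole profile.
        static int[] parseIntList(String intList) {
            String[] strings = intList.split(",");
            int[] ints = new int[strings.length];
            for (int index = 0; index < strings.length; index++) {
                try {
                    ints[index] = Integer.parseInt(strings[index].trim());
                } catch (NumberFormatException ex) {
                    ints[index] = 0;
                }
            }
            return ints;
        }

        public static void main(String[] args) {
            System.out.println(Arrays.toString(parseIntList("44100,48000,88200,96000")));
            System.out.println(Arrays.toString(parseIntList("44100,oops,96000"))); // [44100, 0, 96000]
        }
    }
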
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/ProfileManager.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/ProfileManager.java
index 9839084..c0a6b05 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/ProfileManager.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/peripheralprofile/ProfileManager.java
@@ -17,7 +17,7 @@
 package com.android.cts.verifier.audio.peripheralprofile;
 
 import android.os.Environment;
-import android.support.annotation.Nullable;
+import androidx.annotation.Nullable;
 import android.util.Log;
 import android.util.Xml;
 
@@ -49,7 +49,7 @@
             "<ProfileList Version=\"1.0.0\">" +
             "<PeripheralProfile ProfileName=\"AudioBox USB 96\" ProfileDescription=\"PreSonus AudioBox USB 96\" ProductName=\"USB-Audio - AudioBox USB 96\">" +
                 "<OutputDevInfo ChanCounts=\"2\" ChanPosMasks=\"12\" ChanIndexMasks=\"3\" Encodings=\"4\" SampleRates=\"44100,48000,88200,96000\"/>" +
-                "<InputDevInfo ChanCounts=\"1, 2\" ChanPosMasks=\"12, 16\" ChanIndexMasks=\"1,3\" Encodings=\"4\" SampleRates=\"44100,48000,88200,96000\"/>" +
+                "<InputDevInfo ChanCounts=\"1,2\" ChanPosMasks=\"12,16\" ChanIndexMasks=\"1,3\" Encodings=\"4\" SampleRates=\"44100,48000,88200,96000\"/>" +
             "</PeripheralProfile>" +
             "<PeripheralProfile ProfileName=\"AudioBox 44VSL\" ProfileDescription=\"Presonus AudioBox 44VSL\" ProductName=\"USB-Audio - AudioBox 44 VSL\">" +
                 "<OutputDevInfo ChanCounts=\"2,3,4\" ChanPosMasks=\"12\" ChanIndexMasks=\"3,7,15\" Encodings=\"4\" SampleRates=\"44100,48000,88200,96000\" />" +
@@ -78,17 +78,19 @@
             "<PeripheralProfile ProfileName=\"Pixel USB-C Dongle + Wired Analog Headset\" ProfileDescription=\"Reference USB Dongle\" ProductName=\"USB-Audio - USB-C to 3.5mm-Headphone Adapte\">" +
                 "<OutputDevInfo ChanCounts=\"2\" ChanPosMasks=\"12\" ChanIndexMasks=\"3\" Encodings=\"4\" SampleRates=\"48000\" />" +
                 "<InputDevInfo ChanCounts=\"1,2\" ChanPosMasks=\"12,16\" ChanIndexMasks=\"3\" Encodings=\"4\" SampleRates=\"48000\" />" +
-                "<ButtonInfo HasBtnA=\"1\" HasBtnB=\"1\" HasBtnC=\"1\" HasBtnD=\"1\" />" +
+                "<ButtonInfo HasBtnA=\"1\" HasBtnB=\"1\" HasBtnC=\"1\" />" +
             "</PeripheralProfile>" +
-            "<PeripheralProfile ProfileName=\"gen1-headset\" ProfileDescription=\"Reference USB Headset\" ProductName=\"USB-Audio - Skylab\">" +
-            "<OutputDevInfo ChanCounts=\"2\" ChanPosMasks=\"12\" ChanIndexMasks=\"3\" Encodings=\"2,4\" SampleRates=\"8000,16000,32000,44100,48000\" />" +
-            "<InputDevInfo ChanCounts=\"1,2\" ChanPosMasks=\"12,16\" ChanIndexMasks=\"1\" Encodings=\"2\" SampleRates=\"8000,16000,32000,44100,48000\" />" +
-            "<ButtonInfo HasBtnA=\"1\" HasBtnB=\"1\" HasBtnC=\"1\" HasBtnD=\"1\" />" +
-          "</PeripheralProfile>" +
-          "<PeripheralProfile ProfileName=\"mir\" ProfileDescription=\"Reference USB Dongle\" ProductName=\"USB-Audio - USB Audio\">" +
-            "<OutputDevInfo ChanCounts=\"2\" ChanPosMasks=\"12\" ChanIndexMasks=\"3\" Encodings=\"4\" SampleRates=\"48000\" />" +
-          "</PeripheralProfile>" +
-          "</ProfileList>";
+            "<PeripheralProfile ProfileName=\"HTC Dongle\" ProfileDescription=\"Type-C to 3.5mm Headphone\" ProductName=\"USB-Audio - HTC Type-C to 3.5mm Headphone J\">" +
+                "<OutputDevInfo ChanCounts=\"2\" ChanPosMasks=\"12\" ChanIndexMasks=\"3\" Encodings=\"4\" SampleRates=\"48000\" />" +
+                "<InputDevInfo ChanCounts=\"1,2\" ChanPosMasks=\"12,16\" ChanIndexMasks=\"3\" Encodings=\"4\" SampleRates=\"48000\"/>" +
+                "<ButtonInfo HasBtnA=\"1\" HasBtnB=\"1\" HasBtnC=\"1\" />" +
+            "</PeripheralProfile>" +
+            "<PeripheralProfile ProfileName=\"JBL Reflect Aware\" ProfileDescription=\"JBL Reflect Aware\" ProductName=\"USB-Audio - JBL Reflect Aware\">" +
+                "<OutputDevInfo ChanCounts=\"2\" ChanPosMasks=\"12\" ChanIndexMasks=\"3\" Encodings=\"2\" SampleRates=\"44100,48000\" />" +
+                "<InputDevInfo ChanCounts=\"1\" ChanPosMasks=\"16\" ChanIndexMasks=\"1\" Encodings=\"2\" SampleRates=\"44100,48000\" />" +
+                "<ButtonInfo HasBtnA=\"1\" HasBtnB=\"1\" HasBtnC=\"1\" />" +
+            "</PeripheralProfile>" +
+        "</ProfileList>";
 
     // XML Tags and Attributes
     private final static String kTag_ProfileList = "ProfileList";
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleAdvertiserService.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleAdvertiserService.java
index 5a60ad0..b776143 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleAdvertiserService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleAdvertiserService.java
@@ -126,11 +126,13 @@
         mCallback = new BLEAdvertiseCallback();
         mScannableCallback = new BLEAdvertiseCallback();
         mUnscannableCallback = new BLEAdvertiseCallback();
+        // Medium is last. Android TV already has a medium-power advertiser running, so if only
+        // four advertisers are available, the medium one is the one we want to fail.
         mPowerLevel = new int[]{
             AdvertiseSettings.ADVERTISE_TX_POWER_ULTRA_LOW,
             AdvertiseSettings.ADVERTISE_TX_POWER_LOW,
-            AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM,
-            AdvertiseSettings.ADVERTISE_TX_POWER_HIGH};
+            AdvertiseSettings.ADVERTISE_TX_POWER_HIGH,
+            AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM};
         mPowerCallback = new HashMap<Integer, AdvertiseCallback>();
         for (int x : mPowerLevel) {
             mPowerCallback.put(x, new BLEAdvertiseCallback());
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientService.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientService.java
index 6b686e0..1fd6a2b 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientService.java
@@ -148,8 +148,8 @@
             "com.android.cts.verifier.bluetooth.BLE_CLIENT_ACTION_CLIENT_CONNECT";
     public static final String BLE_CLIENT_ACTION_CLIENT_CONNECT_SECURE =
             "com.android.cts.verifier.bluetooth.BLE_CLIENT_ACTION_CLIENT_CONNECT_SECURE";
-    public static final String BLE_CLIENT_ACTION_BLE_DISVOCER_SERVICE =
-            "com.android.cts.verifier.bluetooth.BLE_CLIENT_ACTION_BLE_DISVOCER_SERVICE";
+    public static final String BLE_CLIENT_ACTION_BLE_DISCOVER_SERVICE =
+            "com.android.cts.verifier.bluetooth.BLE_CLIENT_ACTION_BLE_DISCOVER_SERVICE";
     public static final String BLE_CLIENT_ACTION_REQUEST_MTU_23 =
             "com.android.cts.verifier.bluetooth.BLE_CLIENT_ACTION_REQUEST_MTU_23";
     public static final String BLE_CLIENT_ACTION_REQUEST_MTU_512 =
@@ -353,7 +353,7 @@
                     mExecReliableWrite = ReliableWriteState.RELIABLE_WRITE_NONE;
                     startScan();
                     break;
-                case BLE_CLIENT_ACTION_BLE_DISVOCER_SERVICE:
+                case BLE_CLIENT_ACTION_BLE_DISCOVER_SERVICE:
                     if (mBluetoothGatt != null && mBleState == BluetoothProfile.STATE_CONNECTED) {
                         mBluetoothGatt.discoverServices();
                     } else {
@@ -770,13 +770,13 @@
     }
 
     private void notifyReliableWriteCompleted() {
-        showMessage("Reliable write compelte");
+        showMessage("Reliable write complete");
         Intent intent = new Intent(BLE_RELIABLE_WRITE_COMPLETED);
         sendBroadcast(intent);
     }
 
     private void notifyReliableWriteBadRespCompleted(String err) {
-        showMessage("Reliable write(bad response) compelte");
+        showMessage("Reliable write(bad response) complete");
         Intent intent = new Intent(BLE_RELIABLE_WRITE_BAD_RESP_COMPLETED);
         if (err != null) {
             intent.putExtra(EXTRA_ERROR_MESSAGE, err);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientTestBaseActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientTestBaseActivity.java
index df9a368..b5220f8 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientTestBaseActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleClientTestBaseActivity.java
@@ -35,6 +35,7 @@
 
 import java.util.ArrayList;
 import java.util.List;
+import android.util.Log;
 
 public class BleClientTestBaseActivity extends PassFailButtons.Activity {
     public static final String TAG = "BleClientTestBase";
@@ -62,7 +63,7 @@
     private static final int PASS_FLAG_ALL = 0x3FFFF;
 
     private final int BLE_CLIENT_CONNECT = 0;
-    private final int BLE_BLE_DISVOCER_SERVICE = 1;
+    private final int BLE_BLE_DISCOVER_SERVICE = 1;
     private final int BLE_READ_CHARACTERISTIC = 2;
     private final int BLE_WRITE_CHARACTERISTIC = 3;
     private final int BLE_REQUEST_MTU_23BYTES = 4;
@@ -90,8 +91,6 @@
         super.onCreate(savedInstanceState);
         setContentView(R.layout.ble_server_start);
         setPassFailButtonClickListeners();
-        setInfoResources(R.string.ble_client_test_name,
-                R.string.ble_client_test_info, -1);
         getPassButton().setEnabled(false);
 
         mTestAdapter = new TestAdapter(this, setupTestList());
@@ -212,7 +211,11 @@
             String action = intent.getAction();
             String newAction = null;
             String actionName = null;
+            long previousPassed = mPassed;
             final Intent startIntent = new Intent(BleClientTestBaseActivity.this, BleClientService.class);
+            if (action != null) {
+                Log.d(TAG, "Processing " + action);
+            }
             switch (action) {
             case BleClientService.BLE_BLUETOOTH_DISABLED:
                 showErrorDialog(R.string.ble_bluetooth_disable_title, R.string.ble_bluetooth_disable_message, true);
@@ -222,11 +225,11 @@
                 mTestAdapter.setTestPass(BLE_CLIENT_CONNECT);
                 mPassed |= PASS_FLAG_CONNECT;
                 // execute service discovery test
-                newAction = BleClientService.BLE_CLIENT_ACTION_BLE_DISVOCER_SERVICE;
+                newAction = BleClientService.BLE_CLIENT_ACTION_BLE_DISCOVER_SERVICE;
                 break;
             case BleClientService.BLE_SERVICES_DISCOVERED:
                 actionName = getString(R.string.ble_discover_service_name);
-                mTestAdapter.setTestPass(BLE_BLE_DISVOCER_SERVICE);
+                mTestAdapter.setTestPass(BLE_BLE_DISCOVER_SERVICE);
                 mPassed |= PASS_FLAG_DISCOVER;
                 // execute MTU requesting test (23bytes)
                 newAction = BleClientService.BLE_CLIENT_ACTION_READ_CHARACTERISTIC;
@@ -283,6 +286,7 @@
 
                 // skip Reliable write (bad response) test
                 mPassed |= PASS_FLAG_RELIABLE_WRITE_BAD_RESP;
+                Log.d(TAG, "Skip PASS_FLAG_RELIABLE_WRITE_BAD_RESP.");
                 newAction = BleClientService.BLE_CLIENT_ACTION_NOTIFY_CHARACTERISTIC;
                 showProgressDialog = true;
                 break;
@@ -342,6 +346,7 @@
                 // newAction = BleClientService.BLE_CLIENT_ACTION_READ_RSSI;
                 // execute disconnection test
                 mPassed |= PASS_FLAG_READ_RSSI;
+                Log.d(TAG, "Skip PASS_FLAG_READ_RSSI.");
                 newAction = BleClientService.BLE_CLIENT_ACTION_CLIENT_DISCONNECT;
                 break;
             case BleClientService.BLE_READ_REMOTE_RSSI:
@@ -365,9 +370,16 @@
                 break;
             }
 
+            if (previousPassed != mPassed) {
+                String logMessage = String.format("Passed flags changed from 0x%08X to 0x%08X. Delta=0x%08X",
+                                                  previousPassed, mPassed, mPassed ^ previousPassed);
+                Log.d(TAG, logMessage);
+            }
+
             mTestAdapter.notifyDataSetChanged();
 
             if (newAction != null) {
+                Log.d(TAG, "Starting " + newAction);
                 startIntent.setAction(newAction);
                 if (STEP_EXECUTION) {
                     closeDialog();
@@ -399,6 +411,7 @@
             }
 
             if (mPassed == PASS_FLAG_ALL) {
+                Log.d(TAG, "All Tests Passed.");
                 if (shouldRebootBluetoothAfterTest()) {
                     mBtPowerSwitcher.executeSwitching();
                 } else {
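
The extra logging above is all about the mPassed bitmask: each sub-test sets one PASS_FLAG_* bit and the suite finishes when the mask equals PASS_FLAG_ALL. A compact plain-Java sketch of that bookkeeping (the flag values here are illustrative, not the activity's real constants):

    public class PassFlagSketch {
        // Illustrative flags; the real activity defines one per BLE sub-test.
        static final long PASS_FLAG_CONNECT = 0x1;
        static final long PASS_FLAG_DISCOVER = 0x2;
        static final long PASS_FLAG_READ_RSSI = 0x4;
        static final long PASS_FLAG_ALL = 0x7;

        public static void main(String[] args) {
            long passed = 0;
            long[] results = {PASS_FLAG_CONNECT, PASS_FLAG_DISCOVER, PASS_FLAG_READ_RSSI};
            for (long flag : results) {
                long previous = passed;
                passed |= flag;
                // Same delta log the hunk adds: which bits changed on this broadcast.
                System.out.println(String.format(
                        "Passed flags changed from 0x%08X to 0x%08X. Delta=0x%08X",
                        previous, passed, passed ^ previous));
            }
            System.out.println("All tests passed: " + (passed == PASS_FLAG_ALL));
        }
    }
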
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleConnectionPriorityClientBaseActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleConnectionPriorityClientBaseActivity.java
index 41a7c46..9119aae 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleConnectionPriorityClientBaseActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleConnectionPriorityClientBaseActivity.java
@@ -66,8 +66,6 @@
         mTestAdapter = new TestAdapter(this, setupTestList());
         ListView listView = (ListView) findViewById(R.id.ble_client_connection_tests);
         listView.setAdapter(mTestAdapter);
-        listView.setEnabled(false);
-        listView.setClickable(false);
     }
 
     @Override
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleInsecureClientStartActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleInsecureClientStartActivity.java
index 03256fb..d0a6e80 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleInsecureClientStartActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleInsecureClientStartActivity.java
@@ -18,6 +18,7 @@
 
 import android.content.Intent;
 import android.os.Bundle;
+import com.android.cts.verifier.R;
 
 public class BleInsecureClientStartActivity extends BleClientTestBaseActivity {
     private Intent mIntent;
@@ -25,6 +26,10 @@
     @Override
     public void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
+
+        setInfoResources(R.string.ble_client_test_name,
+                R.string.ble_insecure_client_test_info, -1);
+
         mIntent = new Intent(this, BleClientService.class);
         mIntent.setAction(BleClientService.BLE_CLIENT_ACTION_CLIENT_CONNECT);
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleScannerService.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleScannerService.java
index 2bcd86a..8496905 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleScannerService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/BleScannerService.java
@@ -37,6 +37,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.UUID;
 
 public class BleScannerService extends Service {
 
@@ -74,6 +75,9 @@
             "com.google.cts.verifier.bluetooth.EXTRA_DATA";
 
     private static final byte MANUFACTURER_TEST_ID = (byte)0x07;
+    public static final UUID ATV_REMOTE_UUID =
+            UUID.fromString("cbbfe0e1-f7f3-4206-84e0-84cbb3d09dfc");
+    public static final int MEDIUM_POWER_DBM = -7;
 
     private BluetoothManager mBluetoothManager;
     private BluetoothAdapter mAdapter;
@@ -81,6 +85,7 @@
     private ScanCallback mCallback;
     private Handler mHandler;
     private String mOldMac;
+    private boolean mMediumRcvd;
 
     @Override
     public void onCreate() {
@@ -89,6 +94,7 @@
         mCallback = new BLEScanCallback();
         mHandler = new Handler();
         mOldMac = null;
+        mMediumRcvd = false;
 
         mBluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
         mAdapter = mBluetoothManager.getAdapter();
@@ -111,6 +117,9 @@
                             BleAdvertiserService.POWER_LEVEL_DATA,
                             BleAdvertiserService.POWER_LEVEL_MASK)
                         .build());
+                    filters.add(new ScanFilter.Builder()
+                            .setServiceUuid(new ParcelUuid(ATV_REMOTE_UUID))
+                            .build());
                     settingBuilder.setScanMode(ScanSettings.SCAN_MODE_LOW_LATENCY);
                     break;
                 case COMMAND_SCAN_WITH_FILTER:
@@ -166,6 +175,7 @@
             ScanRecord record = result.getScanRecord();
             String mac = result.getDevice().getAddress();
             Map<ParcelUuid, byte[]> serviceData = record.getServiceData();
+            List<ParcelUuid> serviceUuids = record.getServiceUuids();
 
             if (serviceData.get(new ParcelUuid(BleAdvertiserService.POWER_LEVEL_UUID)) != null) {
                 byte[] data =
@@ -189,6 +199,24 @@
                             sendBroadcast(newIntent);
                         }
                     }
+                    if (data[2] == AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM) {
+                        mMediumRcvd = true;
+                    }
+                }
+            } else if (!mMediumRcvd && serviceUuids != null
+                    && serviceUuids.contains(new ParcelUuid(ATV_REMOTE_UUID))) {
+                // If we're not receiving medium power advertising, allow Android TV Remote Service
+                // advertising packets, which are medium power, to be counted
+                String deviceMac = result.getDevice().getAddress();
+                if (deviceMac != null && mOldMac != null && deviceMac.equals(mOldMac)) {
+                    Intent powerIntent = new Intent(BLE_POWER_LEVEL);
+                    powerIntent.putExtra(EXTRA_MAC_ADDRESS, deviceMac);
+                    // These packets don't include TxPower, so assume a valid medium power level.
+                    powerIntent.putExtra(EXTRA_POWER_LEVEL, MEDIUM_POWER_DBM);
+                    powerIntent.putExtra(EXTRA_RSSI, Integer.toString(result.getRssi()));
+                    powerIntent.putExtra(EXTRA_POWER_LEVEL_BIT,
+                            AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM);
+                    sendBroadcast(powerIntent);
                 }
             }
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/FocusLossPreventionService.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/FocusLossPreventionService.java
new file mode 100644
index 0000000..a232bb7
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/FocusLossPreventionService.java
@@ -0,0 +1,60 @@
+package com.android.cts.verifier.bluetooth;
+
+import android.app.Notification;
+import android.app.NotificationChannel;
+import android.app.NotificationManager;
+import android.app.Service;
+import android.content.Context;
+import android.content.Intent;
+import android.os.IBinder;
+import com.android.cts.verifier.R;
+
+public class FocusLossPreventionService extends Service {
+
+  public static final String TAG = "FocusLossPreventionService";
+
+  private static final String NOTIFICATION_CHANNEL_ID = "ctsVerifier/" + TAG;
+
+  @Override
+  public void onCreate() {
+    super.onCreate();
+  }
+
+  @Override
+  public int onStartCommand(Intent intent, int flags, int startId) {
+    Context context = getApplicationContext();
+    String title = getResources().getString(R.string.app_name);
+    String channelId = "default";
+
+    NotificationManager notificationManager = getSystemService(NotificationManager.class);
+
+    if (notificationManager != null) {
+      notificationManager.createNotificationChannel(
+          new NotificationChannel(
+              NOTIFICATION_CHANNEL_ID,
+              NOTIFICATION_CHANNEL_ID,
+              NotificationManager.IMPORTANCE_DEFAULT));
+
+      Notification notification =
+          new Notification.Builder(context, NOTIFICATION_CHANNEL_ID)
+              .setContentTitle(title)
+              .setSmallIcon(android.R.drawable.stat_sys_data_bluetooth)
+              .build();
+
+      startForeground(1, notification);
+    }
+
+    return START_NOT_STICKY;
+  }
+
+  @Override
+  public void onDestroy() {
+    super.onDestroy();
+    stopForeground(true /* removeNotification */);
+  }
+
+  @Override
+  public IBinder onBind(Intent intent) {
+    return null;
+  }
+}
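
FocusLossPreventionService exists only to hold a foreground notification so the verifier keeps focus on TV devices while the HID test runs; note that onStartCommand() calls startForeground() promptly, which a foreground service must do within a few seconds of being started. A hedged sketch of how an activity would drive it (mirroring the calls HidDeviceActivity makes below; the activity class name here is illustrative):

    import android.app.Activity;
    import android.content.Intent;
    import android.content.pm.PackageManager;
    import android.os.Bundle;

    public class TvFocusHolderSketch extends Activity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            PackageManager pm = getPackageManager();
            if (pm.hasSystemFeature(PackageManager.FEATURE_LEANBACK)) {
                // The service posts its notification and keeps the test app in the foreground.
                startForegroundService(new Intent(this, FocusLossPreventionService.class));
            }
        }

        @Override
        protected void onDestroy() {
            super.onDestroy();
            stopService(new Intent(this, FocusLossPreventionService.class));
        }
    }
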
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/HidConstants.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/HidConstants.java
new file mode 100644
index 0000000..ee7fda5
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/HidConstants.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.bluetooth;
+
+public class HidConstants {
+    public static final byte ID_KEYBOARD = 1;
+    public static final byte ID_MOUSE = 2;
+
+    public static final byte[] HIDD_REPORT_DESC = {
+            (byte) 0x05,
+            (byte) 0x01, // Usage page (Generic Desktop)
+            (byte) 0x09,
+            (byte) 0x06, // Usage (Keyboard)
+            (byte) 0xA1,
+            (byte) 0x01, // Collection (Application)
+            (byte) 0x85,
+            ID_KEYBOARD, //    Report ID
+            (byte) 0x05,
+            (byte) 0x07, //       Usage page (Key Codes)
+            (byte) 0x19,
+            (byte) 0xE0, //       Usage minimum (224)
+            (byte) 0x29,
+            (byte) 0xE7, //       Usage maximum (231)
+            (byte) 0x15,
+            (byte) 0x00, //       Logical minimum (0)
+            (byte) 0x25,
+            (byte) 0x01, //       Logical maximum (1)
+            (byte) 0x75,
+            (byte) 0x01, //       Report size (1)
+            (byte) 0x95,
+            (byte) 0x08, //       Report count (8)
+            (byte) 0x81,
+            (byte) 0x02, //       Input (Data, Variable, Absolute) ; Modifier byte
+            (byte) 0x75,
+            (byte) 0x08, //       Report size (8)
+            (byte) 0x95,
+            (byte) 0x01, //       Report count (1)
+            (byte) 0x81,
+            (byte) 0x01, //       Input (Constant)                 ; Reserved byte
+            (byte) 0x75,
+            (byte) 0x08, //       Report size (8)
+            (byte) 0x95,
+            (byte) 0x06, //       Report count (6)
+            (byte) 0x15,
+            (byte) 0x00, //       Logical Minimum (0)
+            (byte) 0x25,
+            (byte) 0x65, //       Logical Maximum (101)
+            (byte) 0x05,
+            (byte) 0x07, //       Usage page (Key Codes)
+            (byte) 0x19,
+            (byte) 0x00, //       Usage Minimum (0)
+            (byte) 0x29,
+            (byte) 0x65, //       Usage Maximum (101)
+            (byte) 0x81,
+            (byte) 0x00, //       Input (Data, Array)              ; Key array (6 keys)
+            (byte) 0xC0, // End Collection
+            (byte) 0x05,
+            (byte) 0x01, // Usage Page (Generic Desktop)
+            (byte) 0x09,
+            (byte) 0x02, // Usage (Mouse)
+            (byte) 0xA1,
+            (byte) 0x01, // Collection (Application)
+            (byte) 0x85,
+            ID_MOUSE, //    Report ID
+            (byte) 0x09,
+            (byte) 0x01, //    Usage (Pointer)
+            (byte) 0xA1,
+            (byte) 0x00, //    Collection (Physical)
+            (byte) 0x05,
+            (byte) 0x09, //       Usage Page (Buttons)
+            (byte) 0x19,
+            (byte) 0x01, //       Usage minimum (1)
+            (byte) 0x29,
+            (byte) 0x03, //       Usage maximum (3)
+            (byte) 0x15,
+            (byte) 0x00, //       Logical minimum (0)
+            (byte) 0x25,
+            (byte) 0x01, //       Logical maximum (1)
+            (byte) 0x75,
+            (byte) 0x01, //       Report size (1)
+            (byte) 0x95,
+            (byte) 0x03, //       Report count (3)
+            (byte) 0x81,
+            (byte) 0x02, //       Input (Data, Variable, Absolute)
+            (byte) 0x75,
+            (byte) 0x05, //       Report size (5)
+            (byte) 0x95,
+            (byte) 0x01, //       Report count (1)
+            (byte) 0x81,
+            (byte) 0x01, //       Input (constant)                 ; 5 bit padding
+            (byte) 0x05,
+            (byte) 0x01, //       Usage page (Generic Desktop)
+            (byte) 0x09,
+            (byte) 0x30, //       Usage (X)
+            (byte) 0x09,
+            (byte) 0x31, //       Usage (Y)
+            (byte) 0x09,
+            (byte) 0x38, //       Usage (Wheel)
+            (byte) 0x15,
+            (byte) 0x81, //       Logical minimum (-127)
+            (byte) 0x25,
+            (byte) 0x7F, //       Logical maximum (127)
+            (byte) 0x75,
+            (byte) 0x08, //       Report size (8)
+            (byte) 0x95,
+            (byte) 0x03, //       Report count (3)
+            (byte) 0x81,
+            (byte) 0x06, //       Input (Data, Variable, Relative)
+            (byte) 0xC0, //    End Collection
+            (byte) 0xC0 // End Collection
+    };
+}
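
HIDD_REPORT_DESC declares a combined keyboard-plus-mouse device. For the keyboard collection (report ID 1), the descriptor implies an 8-byte input report: one modifier bitmap (usages 0xE0-0xE7), one reserved byte, then six key-code slots. A plain-Java sketch of building such a report (the helper name is illustrative; only the byte layout comes from the descriptor above):

    public class KeyboardReportSketch {
        // [modifiers][reserved][key1][key2][key3][key4][key5][key6]
        static byte[] keyboardReport(byte modifiers, byte... keys) {
            byte[] report = new byte[8];
            report[0] = modifiers;                 // bit field, usages 0xE0-0xE7
            report[1] = 0;                         // reserved/constant byte
            for (int i = 0; i < keys.length && i < 6; i++) {
                report[2 + i] = keys[i];           // up to six simultaneous key codes
            }
            return report;
        }

        public static void main(String[] args) {
            byte keyA = 0x04;                      // HID usage for 'a'
            byte[] press = keyboardReport((byte) 0, keyA);
            byte[] release = keyboardReport((byte) 0);
            System.out.println("press key slot=" + press[2] + ", release key slot=" + release[2]);
        }
    }
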
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/HidDeviceActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/HidDeviceActivity.java
new file mode 100644
index 0000000..c5cffc9
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/HidDeviceActivity.java
@@ -0,0 +1,309 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.bluetooth;
+
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.bluetooth.BluetoothHidDevice;
+import android.bluetooth.BluetoothHidDeviceAppQosSettings;
+import android.bluetooth.BluetoothHidDeviceAppSdpSettings;
+import android.bluetooth.BluetoothProfile;
+import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.os.Bundle;
+
+import android.util.Log;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.Button;
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+public class HidDeviceActivity extends PassFailButtons.Activity {
+    private static final String TAG = HidDeviceActivity.class.getSimpleName();
+    private static final int MSG_APP_STATUS_CHANGED = 0;
+    private static final String SDP_NAME = "CtsVerifier";
+    private static final String SDP_DESCRIPTION = "CtsVerifier HID Device test";
+    private static final String SDP_PROVIDER = "Android";
+    private static final int QOS_TOKEN_RATE = 800; // 9 bytes * 1000000 us / 11250 us
+    private static final int QOS_TOKEN_BUCKET_SIZE = 9;
+    private static final int QOS_PEAK_BANDWIDTH = 0;
+    private static final int QOS_LATENCY = 11250;
+    static final String SAMPLE_INPUT = "bluetooth";
+
+    private BluetoothAdapter mBluetoothAdapter;
+    private BluetoothHidDevice mBluetoothHidDevice;
+    private BluetoothDevice mHidHost;
+    private ExecutorService mExecutor;
+
+    private Button mRegisterAppButton;
+    private Button mMakeDiscoverableButton;
+    private Button mUnregisterAppButton;
+    private Button mSendReportButton;
+    private Button mReplyReportButton;
+    private Button mReportErrorButton;
+
+    private BluetoothProfile.ServiceListener mProfileListener =
+            new BluetoothProfile.ServiceListener() {
+        public void onServiceConnected(int profile, BluetoothProfile proxy) {
+            if (profile == BluetoothProfile.HID_DEVICE) {
+                mBluetoothHidDevice = (BluetoothHidDevice) proxy;
+            }
+        }
+
+        public void onServiceDisconnected(int profile) {
+            if (profile == BluetoothProfile.HID_DEVICE) {
+                mBluetoothHidDevice = null;
+            }
+        }
+    };
+
+    private final BluetoothHidDeviceAppSdpSettings mSdpSettings =
+            new BluetoothHidDeviceAppSdpSettings(
+                    SDP_NAME,
+                    SDP_DESCRIPTION,
+                    SDP_PROVIDER,
+                    BluetoothHidDevice.SUBCLASS1_COMBO,
+                    HidConstants.HIDD_REPORT_DESC);
+
+    private final BluetoothHidDeviceAppQosSettings mOutQos =
+            new BluetoothHidDeviceAppQosSettings(
+                    BluetoothHidDeviceAppQosSettings.SERVICE_BEST_EFFORT,
+                    QOS_TOKEN_RATE,
+                    QOS_TOKEN_BUCKET_SIZE,
+                    QOS_PEAK_BANDWIDTH,
+                    QOS_LATENCY,
+                    BluetoothHidDeviceAppQosSettings.MAX);
+
+    private BluetoothHidDevice.Callback mCallback = new BluetoothHidDevice.Callback() {
+        @Override
+        public void onAppStatusChanged(BluetoothDevice pluggedDevice, boolean registered) {
+            Log.d(TAG, "onAppStatusChanged: pluggedDevice=" + pluggedDevice + " registered="
+                    + registered);
+        }
+    };
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.bt_hid_device);
+        setPassFailButtonClickListeners();
+        setInfoResources(R.string.bt_hid_device_test_name, R.string.bt_hid_device_test_info, -1);
+
+        mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
+        mBluetoothAdapter.getProfileProxy(getApplicationContext(), mProfileListener,
+                BluetoothProfile.HID_DEVICE);
+        mExecutor = Executors.newSingleThreadExecutor();
+
+        mRegisterAppButton = (Button) findViewById(R.id.bt_hid_device_register_button);
+        mRegisterAppButton.setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                register();
+            }
+        });
+
+        mUnregisterAppButton = (Button) findViewById(R.id.bt_hid_device_unregister_button);
+        mUnregisterAppButton.setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                unregister();
+            }
+        });
+
+        mMakeDiscoverableButton = (Button) findViewById(R.id.bt_hid_device_discoverable_button);
+        mMakeDiscoverableButton.setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                makeDiscoverable();
+            }
+        });
+
+        mSendReportButton = (Button) findViewById(R.id.bt_hid_device_send_report_button);
+        mSendReportButton.setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                testSendReport();
+            }
+        });
+
+        mReplyReportButton = (Button) findViewById(R.id.bt_hid_device_reply_report_button);
+        mReplyReportButton.setEnabled(false);
+        mReplyReportButton.setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                testReplyReport();
+            }
+        });
+
+        mReportErrorButton = (Button) findViewById(R.id.bt_hid_device_report_error_button);
+        mReportErrorButton.setEnabled(false);
+        mReportErrorButton.setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                testReportError();
+            }
+        });
+
+        if (isAndroidTv()) {
+            startForegroundService(new Intent(getApplication(),
+                  FocusLossPreventionService.class));
+        }
+    }
+
+    @Override
+    protected void onDestroy() {
+        super.onDestroy();
+        unregister();
+
+        if (isAndroidTv()) {
+            stopService(new Intent(getApplication(),
+                  FocusLossPreventionService.class));
+        }
+    }
+
+    private boolean register() {
+        return mBluetoothHidDevice != null
+                && mBluetoothHidDevice.registerApp(mSdpSettings, null, mOutQos, mExecutor,
+                mCallback);
+    }
+
+    private void makeDiscoverable() {
+        if (mBluetoothAdapter.getScanMode() !=
+                BluetoothAdapter.SCAN_MODE_CONNECTABLE_DISCOVERABLE) {
+            Intent intent = new Intent(BluetoothAdapter.ACTION_REQUEST_DISCOVERABLE);
+            intent.putExtra(BluetoothAdapter.EXTRA_DISCOVERABLE_DURATION, 30);
+            startActivity(intent);
+        }
+    }
+
+    private boolean unregister() {
+        return mBluetoothHidDevice != null && mBluetoothHidDevice.unregisterApp();
+    }
+
+
+    private boolean getConnectedDevice() {
+        if (mBluetoothHidDevice == null) {
+            Log.w(TAG, "mBluetoothHidDevice is null");
+            return false;
+        }
+
+        List<BluetoothDevice> connectedDevices = mBluetoothHidDevice.getConnectedDevices();
+        if (connectedDevices.size() == 0) {
+            return false;
+        } else {
+            // A HID host is connected; remember it so the report tests can target it.
+            mHidHost = connectedDevices.get(0);
+            return true;
+        }
+    }
+
+    private void testSendReport() {
+        if (mBluetoothHidDevice == null) {
+            Log.w(TAG, "mBluetoothHidDevice is null");
+            return;
+        }
+
+        if (mHidHost == null) {
+            if (mBluetoothHidDevice.getConnectedDevices().size() == 0) {
+                Log.w(TAG, "HID host not connected");
+                return;
+            } else {
+                mHidHost = mBluetoothHidDevice.getConnectedDevices().get(0);
+                Log.d(TAG, "connected to: " + mHidHost);
+            }
+        }
+        for (char c : SAMPLE_INPUT.toCharArray()) {
+            mBluetoothHidDevice.sendReport(mHidHost, BluetoothHidDevice.REPORT_TYPE_INPUT,
+                    singleKeyHit(charToKeyCode(c)));
+            mBluetoothHidDevice.sendReport(mHidHost, BluetoothHidDevice.REPORT_TYPE_INPUT,
+                    singleKeyHit((byte) 0));
+        }
+        mReplyReportButton.setEnabled(true);
+
+    }
+
+    private void testReplyReport() {
+        if (mBluetoothHidDevice == null) {
+            Log.w(TAG, "mBluetoothHidDevice is null");
+            return;
+        }
+
+        if (mHidHost == null) {
+            if (mBluetoothHidDevice.getConnectedDevices().size() == 0) {
+                Log.w(TAG, "HID host not connected");
+                return;
+            } else {
+                mHidHost = mBluetoothHidDevice.getConnectedDevices().get(0);
+                Log.d(TAG, "connected to: " + mHidHost);
+            }
+        }
+        if (mBluetoothHidDevice.replyReport(mHidHost, (byte) 0, (byte) 0,
+                singleKeyHit((byte) 0))) {
+            mReportErrorButton.setEnabled(true);
+        }
+    }
+
+    private void testReportError() {
+        if (mBluetoothHidDevice == null) {
+            Log.w(TAG, "mBluetoothHidDevice is null");
+            return;
+        }
+
+        if (mHidHost == null) {
+            if (mBluetoothHidDevice.getConnectedDevices().size() == 0) {
+                Log.w(TAG, "HID host not connected");
+                return;
+            } else {
+                mHidHost = mBluetoothHidDevice.getConnectedDevices().get(0);
+                Log.d(TAG, "connected to: " + mHidHost);
+            }
+        }
+        if (mBluetoothHidDevice.reportError(mHidHost, (byte) 0)) {
+            getPassButton().setEnabled(true);
+        }
+    }
+
+
+    private byte[] singleKeyHit(byte code) {
+        byte[] keyboardData = new byte[8];
+        keyboardData[0] = 0;
+        keyboardData[1] = 0;
+        keyboardData[2] = code;
+        keyboardData[3] = 0;
+        keyboardData[4] = 0;
+        keyboardData[5] = 0;
+        keyboardData[6] = 0;
+        keyboardData[7] = 0;
+        return keyboardData;
+    }
+
+    private byte charToKeyCode(char c) {
+        if (c < 'a' || c > 'z') {
+            return 0;
+        }
+        return (byte) (c - 'a' + 0x04);
+    }
+
+    private boolean isAndroidTv() {
+        final PackageManager pm = getApplicationContext().getPackageManager();
+        return pm.hasSystemFeature(PackageManager.FEATURE_TELEVISION)
+                  || pm.hasSystemFeature(PackageManager.FEATURE_LEANBACK);
+    }
+}
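
testSendReport() types SAMPLE_INPUT by sending, for each character, a key-down report followed by an all-zero key-up report, and charToKeyCode() maps 'a'..'z' onto HID usages 0x04..0x1D. A standalone sketch of that mapping and pairing (println stands in for the sendReport() calls):

    public class TypeStringSketch {
        // Same mapping as charToKeyCode(): lowercase letters only, everything else is 0.
        static byte charToKeyCode(char c) {
            if (c < 'a' || c > 'z') {
                return 0;
            }
            return (byte) (c - 'a' + 0x04);
        }

        public static void main(String[] args) {
            for (char c : "bluetooth".toCharArray()) {
                // Key down, then key up, matching the activity's paired sendReport() calls.
                System.out.printf("press  '%c' -> usage 0x%02X%n", c, charToKeyCode(c));
                System.out.println("release      -> usage 0x00");
            }
        }
    }
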
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/HidHostActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/HidHostActivity.java
new file mode 100644
index 0000000..2e70b45
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/bluetooth/HidHostActivity.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.cts.verifier.bluetooth;
+
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.content.Intent;
+import android.os.Bundle;
+import android.text.Editable;
+import android.text.TextWatcher;
+import android.util.Log;
+
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.Button;
+import android.widget.EditText;
+
+import com.android.cts.verifier.PassFailButtons;
+import com.android.cts.verifier.R;
+import com.android.cts.verifier.TestResult;
+
+public class HidHostActivity extends PassFailButtons.Activity {
+    private static final String TAG = HidHostActivity.class.getSimpleName();
+
+    BluetoothAdapter mBluetoothAdapter;
+
+    private static final int ENABLE_BLUETOOTH_REQUEST = 1;
+    private static final int PICK_SERVER_DEVICE_REQUEST = 2;
+
+    private String mDeviceAddress;
+
+    private Button mPickDeviceButton;
+    private EditText mEditText;
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.bt_hid_host);
+        setPassFailButtonClickListeners();
+        setInfoResources(R.string.bt_hid_host_test_name, R.string.bt_hid_host_test_info, -1);
+        mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
+
+        mEditText = (EditText) findViewById(R.id.bt_hid_host_edit_text);
+        mEditText.addTextChangedListener(new TextWatcher() {
+            @Override
+            public void beforeTextChanged(CharSequence s, int start, int count, int after) {}
+
+            @Override
+            public void onTextChanged(CharSequence s, int start, int before, int count) {
+                if (s.toString().equals(HidDeviceActivity.SAMPLE_INPUT)) {
+                    getPassButton().setEnabled(true);
+                }
+            }
+
+            @Override
+            public void afterTextChanged(Editable s) {}
+        });
+        mPickDeviceButton = (Button) findViewById(R.id.bt_hid_host_pick_device_button);
+        mPickDeviceButton.setOnClickListener(new OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                Intent intent = new Intent(HidHostActivity.this,
+                        DevicePickerActivity.class);
+                startActivityForResult(intent, PICK_SERVER_DEVICE_REQUEST);
+                mEditText.requestFocus();
+            }
+        });
+    }
+
+    @Override
+    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+        super.onActivityResult(requestCode, resultCode, data);
+        switch (requestCode) {
+            case PICK_SERVER_DEVICE_REQUEST:
+                Log.d(TAG, "pick device: " + resultCode);
+                if (resultCode == RESULT_OK) {
+                    mDeviceAddress = data.getStringExtra(DevicePickerActivity.EXTRA_DEVICE_ADDRESS);
+                    connectToDevice();
+                } else {
+                    setResult(RESULT_CANCELED);
+                    finish();
+                }
+                break;
+        }
+    }
+
+    private boolean connectToDevice() {
+        BluetoothDevice bluetoothDevice = mBluetoothAdapter.getRemoteDevice(mDeviceAddress);
+        bluetoothDevice.createBond();
+        return true;
+    }
+}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/formats/CameraFormatsActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/formats/CameraFormatsActivity.java
index 9c5b31d..07c598f 100755
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/formats/CameraFormatsActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/formats/CameraFormatsActivity.java
@@ -40,6 +40,7 @@
 import android.view.TextureView;
 import android.widget.AdapterView;
 import android.widget.ArrayAdapter;
+import android.widget.Button;
 import android.widget.ImageButton;
 import android.widget.ImageView;
 import android.widget.Spinner;
@@ -51,6 +52,7 @@
 import java.util.HashSet;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Optional;
 import java.util.TreeSet;
 
 /**
@@ -99,14 +101,44 @@
     private boolean mProcessInProgress = false;
     private boolean mProcessingFirstFrame = false;
 
-    private TreeSet<String> mTestedCombinations = new TreeSet<String>();
-    private TreeSet<String> mUntestedCombinations = new TreeSet<String>();
+    private final TreeSet<CameraCombination> mTestedCombinations = new TreeSet<>(COMPARATOR);
+    private final TreeSet<CameraCombination> mUntestedCombinations = new TreeSet<>(COMPARATOR);
 
     private int mAllCombinationsSize = 0;
 
     // Menu to show the test progress
     private static final int MENU_ID_PROGRESS = Menu.FIRST + 1;
 
+    private class CameraCombination {
+        private final int mCameraIndex;
+        private final int mResolutionIndex;
+        private final int mFormatIndex;
+        private final int mResolutionWidth;
+        private final int mResolutionHeight;
+        private final String mFormatName;
+
+        private CameraCombination(int cameraIndex, int resolutionIndex, int formatIndex,
+            int resolutionWidth, int resolutionHeight, String formatName) {
+            this.mCameraIndex = cameraIndex;
+            this.mResolutionIndex = resolutionIndex;
+            this.mFormatIndex = formatIndex;
+            this.mResolutionWidth = resolutionWidth;
+            this.mResolutionHeight = resolutionHeight;
+            this.mFormatName = formatName;
+        }
+
+        @Override
+        public String toString() {
+            return String.format("Camera %d, %dx%d, %s",
+                mCameraIndex, mResolutionWidth, mResolutionHeight, mFormatName);
+        }
+    }
+
+    private static final Comparator<CameraCombination> COMPARATOR =
+        Comparator.<CameraCombination, Integer>comparing(c -> c.mCameraIndex)
+            .thenComparing(c -> c.mResolutionIndex)
+            .thenComparing(c -> c.mFormatIndex);
+
     @Override
     public void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
@@ -130,7 +162,6 @@
         String[] cameraNames = new String[numCameras];
         for (int i = 0; i < numCameras; i++) {
             cameraNames[i] = "Camera " + i;
-            mUntestedCombinations.add("All combinations for Camera " + i + "\n");
         }
         mCameraSpinner = (Spinner) findViewById(R.id.cameras_selection);
         mCameraSpinner.setAdapter(
@@ -171,6 +202,35 @@
         yTotal.setConcat(y2r, yOffset);
 
         mYuv2RgbFilter = new ColorMatrixColorFilter(yTotal);
+
+        Button mNextButton = findViewById(R.id.next_button);
+        mNextButton.setOnClickListener(v -> {
+                setUntestedCombination();
+                startPreview();
+        });
+    }
+
+    /**
+     * Set an untested combination of resolution and format for the current camera.
+     * Triggered by next button click.
+     */
+    private void setUntestedCombination() {
+        Optional<CameraCombination> combination = mUntestedCombinations.stream().filter(
+            c -> c.mCameraIndex == mCurrentCameraId).findFirst();
+        if (!combination.isPresent()) {
+            Toast.makeText(this, "All Camera " + mCurrentCameraId + " tests are done.",
+                Toast.LENGTH_SHORT).show();
+            return;
+        }
+
+        // There is an untested combination for the current camera; set it as the next one to test.
+        int mNextResolutionIndex = combination.get().mResolutionIndex;
+        int mNextFormatIndex = combination.get().mFormatIndex;
+
+        mNextPreviewSize = mPreviewSizes.get(mNextResolutionIndex);
+        mResolutionSpinner.setSelection(mNextResolutionIndex);
+        mNextPreviewFormat = mPreviewFormats.get(mNextFormatIndex);
+        mFormatSpinner.setSelection(mNextFormatIndex);
     }
 
     @Override
@@ -183,13 +243,13 @@
     public boolean onOptionsItemSelected(MenuItem item) {
         boolean ret = true;
         switch (item.getItemId()) {
-        case MENU_ID_PROGRESS:
-            showCombinationsDialog();
-            ret = true;
-            break;
-        default:
-            ret = super.onOptionsItemSelected(item);
-            break;
+            case MENU_ID_PROGRESS:
+                showCombinationsDialog();
+                ret = true;
+                break;
+            default:
+                ret = super.onOptionsItemSelected(item);
+                break;
         }
         return ret;
     }
@@ -222,17 +282,19 @@
     public String getTestDetails() {
         StringBuilder reportBuilder = new StringBuilder();
         reportBuilder.append("Tested combinations:\n");
-        for (String combination: mTestedCombinations) {
+        for (CameraCombination combination: mTestedCombinations) {
             reportBuilder.append(combination);
+            reportBuilder.append("\n");
         }
+
         reportBuilder.append("Untested combinations:\n");
-        for (String combination: mUntestedCombinations) {
+        for (CameraCombination combination: mUntestedCombinations) {
             reportBuilder.append(combination);
+            reportBuilder.append("\n");
         }
         return reportBuilder.toString();
     }
 
-
     public void onSurfaceTextureAvailable(SurfaceTexture surface,
             int width, int height) {
         mPreviewTexture = surface;
@@ -240,7 +302,7 @@
                 || mFormatView.getMeasuredHeight() != height) {
             mPreviewTexWidth = mFormatView.getMeasuredWidth();
             mPreviewTexHeight = mFormatView.getMeasuredHeight();
-         } else {
+        } else {
             mPreviewTexWidth = width;
             mPreviewTexHeight = height;
         }
@@ -310,8 +372,6 @@
 
             };
 
-
-
     private void setUpCamera(int id) {
         shutdownCamera();
 
@@ -331,7 +391,7 @@
                 if (lhs.height > rhs.height) return 1;
                 return 0;
             }
-        };
+        }
 
         SizeCompare s = new SizeCompare();
         TreeSet<Camera.Size> sortedResolutions = new TreeSet<Camera.Size>(s);
@@ -365,13 +425,14 @@
 
         // Update untested entries
 
-        mUntestedCombinations.remove("All combinations for Camera " + id + "\n");
-        for (Camera.Size previewSize: mPreviewSizes) {
-            for (int previewFormat: mPreviewFormats) {
-                String combination = "Camera " + id + ", "
-                        + previewSize.width + "x" + previewSize.height
-                        + ", " + mPreviewFormatNames.get(previewFormat)
-                        + "\n";
+        for (int resolutionIndex = 0; resolutionIndex < mPreviewSizes.size(); resolutionIndex++) {
+            for (int formatIndex = 0; formatIndex < mPreviewFormats.size(); formatIndex++) {
+                CameraCombination combination = new CameraCombination(
+                    id, resolutionIndex, formatIndex,
+                    mPreviewSizes.get(resolutionIndex).width,
+                    mPreviewSizes.get(resolutionIndex).height,
+                    mPreviewFormatNames.get(mPreviewFormats.get(formatIndex)));
+
                 if (!mTestedCombinations.contains(combination)) {
                     mUntestedCombinations.add(combination);
                 }
@@ -571,14 +632,20 @@
                 mFormatView.setImageBitmap(mCallbackBitmap);
                 if (mProcessingFirstFrame) {
                     mProcessingFirstFrame = false;
-                    String combination = "Camera " + mCurrentCameraId + ", "
-                            + mPreviewSize.width + "x" + mPreviewSize.height
-                            + ", " + mPreviewFormatNames.get(mPreviewFormat)
-                            + "\n";
+
+                    CameraCombination combination = new CameraCombination(
+                        mCurrentCameraId,
+                        mResolutionSpinner.getSelectedItemPosition(),
+                        mFormatSpinner.getSelectedItemPosition(),
+                        mPreviewSizes.get(mResolutionSpinner.getSelectedItemPosition()).width,
+                        mPreviewSizes.get(mResolutionSpinner.getSelectedItemPosition()).height,
+                        mPreviewFormatNames.get(
+                            mPreviewFormats.get(mFormatSpinner.getSelectedItemPosition())));
+
                     mUntestedCombinations.remove(combination);
                     mTestedCombinations.add(combination);
 
-                    displayToast(combination.replace("\n", ""));
+                    displayToast(combination.toString());
 
                     if (mTestedCombinations.size() == mAllCombinationsSize) {
                         setPassButtonEnabled(true);
@@ -617,7 +684,8 @@
     }
 
     private void displayToast(String combination) {
-        Toast.makeText(this, "\"" + combination + "\"\n" + " has been tested.", Toast.LENGTH_LONG).show();
+        Toast.makeText(this, "\"" + combination + "\"\n" + " has been tested.", Toast.LENGTH_SHORT)
+            .show();
     }
 
     public void onPreviewFrame(byte[] data, Camera camera) {
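
The hunks above replace the old String-based bookkeeping with a CameraCombination value type that is added to, looked up in, and removed from the tested/untested collections, which only behaves correctly if the class defines value-based equals() and hashCode(). A minimal sketch of such a class, assuming the constructor arguments used in the hunks; the field names and the choice of identity fields are assumptions, not the actual CTS implementation:

    import java.util.Objects;

    // Minimal sketch only: field names and identity fields are assumed, not taken from CTS.
    final class CameraCombination {
        private final int mCameraId;
        private final int mResolutionIndex;
        private final int mFormatIndex;
        private final int mWidth;
        private final int mHeight;
        private final String mFormatName;

        CameraCombination(int cameraId, int resolutionIndex, int formatIndex,
                int width, int height, String formatName) {
            mCameraId = cameraId;
            mResolutionIndex = resolutionIndex;
            mFormatIndex = formatIndex;
            mWidth = width;
            mHeight = height;
            mFormatName = formatName;
        }

        // Identity is the (camera, resolution, format) triple so set lookups and removals work;
        // width, height, and format name are carried along for display only.
        @Override
        public boolean equals(Object o) {
            if (!(o instanceof CameraCombination)) return false;
            CameraCombination other = (CameraCombination) o;
            return mCameraId == other.mCameraId
                    && mResolutionIndex == other.mResolutionIndex
                    && mFormatIndex == other.mFormatIndex;
        }

        @Override
        public int hashCode() {
            return Objects.hash(mCameraId, mResolutionIndex, mFormatIndex);
        }

        @Override
        public String toString() {
            return "Camera " + mCameraId + ", " + mWidth + "x" + mHeight + ", " + mFormatName;
        }
    }

Keying equality on the index triple keeps duplicates out of the sets even if two resolution entries happen to share the same pixel dimensions.
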
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/fov/PhotoCaptureActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/fov/PhotoCaptureActivity.java
index 8839d52..b627797 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/fov/PhotoCaptureActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/fov/PhotoCaptureActivity.java
@@ -26,6 +26,9 @@
 import android.hardware.Camera;
 import android.hardware.Camera.PictureCallback;
 import android.hardware.Camera.ShutterCallback;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
 import android.os.Bundle;
 import android.os.PowerManager;
 import android.os.PowerManager.WakeLock;
@@ -104,6 +107,13 @@
         super.onCreate(savedInstanceState);
         setContentView(R.layout.camera_fov_calibration_photo_capture);
 
+        int cameraToBeTested = 0;
+        for (int cameraId = 0; cameraId < Camera.getNumberOfCameras(); ++cameraId) {
+            if (!isExternalCamera(cameraId)) {
+                cameraToBeTested++;
+            }
+        }
+
         mPreview = (SurfaceView) findViewById(R.id.camera_fov_camera_preview);
         mSurfaceHolder = mPreview.getHolder();
         mSurfaceHolder.addCallback(this);
@@ -139,7 +149,9 @@
                 mPreviewSizeCamerasToProcess.clear();
                 mPreviewSizes =  new Size[Camera.getNumberOfCameras()];
                 for (int cameraId = 0; cameraId < Camera.getNumberOfCameras(); ++cameraId) {
-                    mPreviewSizeCamerasToProcess.add(cameraId);
+                    if (!isExternalCamera(cameraId)) {
+                        mPreviewSizeCamerasToProcess.add(cameraId);
+                    }
                 }
                 showNextDialogToChoosePreviewSize();
             }
@@ -181,9 +193,19 @@
                 }
             }
 
-        @Override
-        public void onNothingSelected(AdapterView<?> arg0) {}
+            @Override
+            public void onNothingSelected(AdapterView<?> arg0) {}
         });
+
+        if (cameraToBeTested == 0) {
+            Log.i(TAG, "No cameras needs to be tested. Setting test pass.");
+            Toast.makeText(this, "No cameras needs to be tested. Test pass.",
+                    Toast.LENGTH_LONG).show();
+
+            TestResult.setPassedResult(this, getClass().getName(),
+                    "All cameras are external, test skipped!");
+            finish();
+        }
     }
 
     @Override
@@ -198,6 +220,10 @@
             mSupportedResolutions = new ArrayList<SelectableResolution>();
             int numCameras = Camera.getNumberOfCameras();
             for (int cameraId = 0; cameraId < numCameras; ++cameraId) {
+                if (isExternalCamera(cameraId)) {
+                    continue;
+                }
+
                 Camera camera = Camera.open(cameraId);
 
                 // Get the supported picture sizes and fill the spinner.
@@ -548,4 +574,23 @@
             mPreviewOrientation = mJpegOrientation;
         }
     }
+
+    private boolean isExternalCamera(int cameraId) {
+        CameraManager manager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
+        try {
+            String cameraIdStr = manager.getCameraIdList()[cameraId];
+            CameraCharacteristics characteristics =
+                    manager.getCameraCharacteristics(cameraIdStr);
+
+            if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) ==
+                            CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL) {
+                // External cameras don't support FOV information
+                return true;
+            }
+        } catch (CameraAccessException e) {
+            Toast.makeText(this, "Could not access camera " + cameraId +
+                    ": " + e.getMessage(), Toast.LENGTH_LONG).show();
+        }
+        return false;
+    }
 }
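
isExternalCamera() above bridges a legacy Camera index into the camera2 ID list before checking INFO_SUPPORTED_HARDWARE_LEVEL. For reference, a minimal sketch of the same check written purely against camera2, assuming the caller already holds a camera2 ID string; the class and method names are made up:

    import android.content.Context;
    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraManager;

    final class ExternalCameraCheck {
        // Hypothetical variant: take a camera2 ID directly instead of indexing with a legacy id.
        static boolean isExternal(Context context, String camera2Id) throws CameraAccessException {
            CameraManager manager = context.getSystemService(CameraManager.class);
            CameraCharacteristics chars = manager.getCameraCharacteristics(camera2Id);
            Integer level = chars.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
            return level != null
                    && level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
        }
    }
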
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/intents/CameraIntentsActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/intents/CameraIntentsActivity.java
index 161cceb..7c0e475 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/intents/CameraIntentsActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/intents/CameraIntentsActivity.java
@@ -23,6 +23,7 @@
 import android.content.Intent;
 import android.content.IntentFilter;
 import android.hardware.Camera;
+import android.media.ExifInterface;
 import android.net.Uri;
 import android.os.AsyncTask;
 import android.os.Bundle;
@@ -34,13 +35,21 @@
 import android.widget.Button;
 import android.widget.ImageButton;
 import android.widget.TextView;
+import android.widget.Toast;
+
+import androidx.core.content.FileProvider;
 
 import com.android.cts.verifier.camera.intents.CameraContentJobService;
 import com.android.cts.verifier.PassFailButtons;
 import com.android.cts.verifier.R;
 import com.android.cts.verifier.TestResult;
 
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
 import java.util.TreeSet;
+import java.util.Date;
+import java.text.SimpleDateFormat;
 
 /**
  * Tests for manual verification of uri trigger being fired.
@@ -80,6 +89,8 @@
     private ImageButton mPassButton;
     private ImageButton mFailButton;
     private Button mStartTestButton;
+    private File mDebugFolder = null;
+    private File mImageTarget = null;
 
     private int mState = STATE_OFF;
 
@@ -271,27 +282,66 @@
     @Override
     protected void onActivityResult(
         int requestCode, int resultCode, Intent data) {
-        if (requestCode == 1337 + getStageIndex()) {
+        int stageIndex = getStageIndex();
+        if (requestCode == 1337 + stageIndex) {
             Log.v(TAG, "Activity we launched was finished");
             mActivityResult = true;
 
             if (mState != STATE_FAILED
                 && getStageIndex() == STAGE_INTENT_PICTURE) {
-                mPassButton.setEnabled(true);
-                mFailButton.setEnabled(false);
-
-                mState = STATE_SUCCESSFUL;
-                /* successful, unless we get the URI trigger back
-                 at some point later on */
+                handleIntentPictureResult();
             }
         }
     }
 
+    private void handleIntentPictureResult() {
+        if (mImageTarget == null) {
+            Log.d(TAG, "Image target was not set");
+            return;
+        }
+        try {
+            if (!mImageTarget.exists() || mImageTarget.length() == 0) {
+                Log.d(TAG, "Image target does not exist or it is empty");
+                mState = STATE_FAILED;
+                return;
+            }
+
+            try (FileInputStream imageStream = new FileInputStream(mImageTarget)) {
+                final ExifInterface exif = new ExifInterface(imageStream);
+                if (!checkExifAttribute(exif, ExifInterface.TAG_MAKE)
+                    || !checkExifAttribute(exif, ExifInterface.TAG_MODEL)
+                    || !checkExifAttribute(exif, ExifInterface.TAG_DATETIME)) {
+                    Log.d(TAG, "The required tag does not appear in the exif");
+                    mState = STATE_FAILED;
+                    return;
+                }
+                mState = STATE_SUCCESSFUL;
+                setPassButton(true);
+            } catch (IOException ex) {
+                Log.e(TAG, "Failed to verify Exif", ex);
+                mState = STATE_FAILED;
+                return;
+            }
+        } finally {
+            mImageTarget.delete();
+        }
+    }
+
+    private boolean checkExifAttribute(ExifInterface exif, String tag) {
+        final String res = exif.getAttribute(tag);
+        return res != null && res.length() > 0;
+    }
+
     @Override
     public String getTestDetails() {
         return mReportBuilder.toString();
     }
 
+    private void setPassButton(boolean pass) {
+        mPassButton.setEnabled(pass);
+        mFailButton.setEnabled(!pass);
+    }
+
     private class WaitForTriggerTask extends AsyncTask<Void, Void, Boolean> {
         protected Boolean doInBackground(Void... param) {
             try {
@@ -387,7 +437,33 @@
 
             if (intentStr != null) {
                 cameraIntent = new Intent(intentStr);
-                startActivityForResult(cameraIntent, 1337 + getStageIndex());
+                switch (stageIndex) {
+                    case STAGE_INTENT_PICTURE:
+                        mDebugFolder = new File(this.getFilesDir(), "debug");
+                        mDebugFolder.mkdirs();
+                        if (!mDebugFolder.exists()) {
+                            Toast.makeText(this, R.string.ci_directory_creation_error,
+                                    Toast.LENGTH_SHORT).show();
+                            Log.v(TAG, "Could not create directory");
+                            return;
+                        }
+
+                        File targetFile;
+                        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
+                        mImageTarget = new File(mDebugFolder, timeStamp + "capture.jpg");
+                        targetFile = mImageTarget;
+                        cameraIntent.putExtra(MediaStore.EXTRA_OUTPUT, FileProvider.getUriForFile(this,
+                              "com.android.cts.verifier.managedprovisioning.fileprovider",
+                              targetFile));
+                        startActivityForResult(cameraIntent, 1337 + getStageIndex());
+                        break;
+                    case STAGE_INTENT_VIDEO:
+                        startActivityForResult(cameraIntent, 1337 + getStageIndex());
+                        break;
+                    default:
+                        Log.wtf(TAG, "Unexpected stage index to send intent");
+                        return;
+                }
             }
 
             mStartTestButton.setEnabled(false);
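
The STAGE_INTENT_PICTURE branch above points the capture intent at a FileProvider-backed content:// URI so the resulting JPEG and its Exif tags can be inspected afterwards. For that to work the app must declare a matching <provider> entry in its manifest, and third-party camera apps generally need a URI permission grant as well. A minimal sketch of how such an intent is commonly assembled; the provider authority, class, and method names here are assumptions, not the CTS values:

    import android.content.Context;
    import android.content.Intent;
    import android.net.Uri;
    import android.provider.MediaStore;
    import androidx.core.content.FileProvider;
    import java.io.File;

    final class CaptureIntentHelper {
        // Hypothetical helper; the provider authority is an assumption.
        static Intent buildCaptureIntent(Context context, File targetFile) {
            Uri outputUri = FileProvider.getUriForFile(
                    context, "com.example.fileprovider", targetFile);
            Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
            intent.putExtra(MediaStore.EXTRA_OUTPUT, outputUri);
            // Let the receiving camera app read and write the output URI.
            intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION
                    | Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
            return intent;
        }
    }
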
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsSerializer.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsSerializer.java
index 2a6c146..fbd7522 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsSerializer.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsSerializer.java
@@ -52,6 +52,7 @@
 import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Set;
 
 /**
  * Class to deal with serializing and deserializing between JSON and Camera2 objects.
@@ -262,14 +263,18 @@
     @SuppressWarnings("unchecked")
     private static Object serializeLensShadingMap(LensShadingMap map)
             throws org.json.JSONException {
-        JSONArray mapObj = new JSONArray();
+        JSONObject mapObj = new JSONObject();
+        JSONArray mapArr = new JSONArray();
         for (int row = 0; row < map.getRowCount(); row++) {
             for (int col = 0; col < map.getColumnCount(); col++) {
                 for (int ch = 0; ch < 4; ch++) {
-                    mapObj.put(map.getGainFactor(ch, col, row));
+                    mapArr.put(map.getGainFactor(ch, col, row));
                 }
             }
         }
+        mapObj.put("width", map.getColumnCount());
+        mapObj.put("height", map.getRowCount());
+        mapObj.put("map", mapArr);
         return mapObj;
     }
 
@@ -473,6 +478,37 @@
         if (md.getClass() == TotalCaptureResult.class) {
             allFields = CaptureResult.class.getDeclaredFields();
         }
+        if (md.getClass() == CameraCharacteristics.class) {
+            // Special handling for information not stored in metadata keys
+            CameraCharacteristics chars = (CameraCharacteristics) md;
+            List<CameraCharacteristics.Key<?>> charsKeys = chars.getKeys();
+            List<CaptureRequest.Key<?>> requestKeys = chars.getAvailableCaptureRequestKeys();
+            Set<String> physicalCamIds = chars.getPhysicalCameraIds();
+
+            try {
+                JSONArray charKeysArr = new JSONArray();
+                for (CameraCharacteristics.Key<?> k : charsKeys) {
+                    charKeysArr.put(k.getName());
+                }
+                JSONArray reqKeysArr = new JSONArray();
+                for (CaptureRequest.Key<?> k : requestKeys) {
+                    reqKeysArr.put(k.getName());
+                }
+                // Avoid using the hidden metadata key name here to prevent conflicts
+                jsonObj.put("camera.characteristics.keys", charKeysArr);
+                jsonObj.put("camera.characteristics.requestKeys", reqKeysArr);
+
+                if (!physicalCamIds.isEmpty()) {
+                    JSONArray physCamIdsArr = new JSONArray();
+                    for (String id : physicalCamIds) {
+                        physCamIdsArr.put(id);
+                    }
+                    jsonObj.put("camera.characteristics.physicalCamIds", physCamIdsArr);
+                }
+            } catch (org.json.JSONException e) {
+                throw new ItsException("JSON error for CameraCharacteristics:", e);
+            }
+        }
         for (Field field : allFields) {
             if (Modifier.isPublic(field.getModifiers()) &&
                     Modifier.isStatic(field.getModifiers()) &&
@@ -673,6 +709,18 @@
                                                 arr.getJSONObject(i).getInt("denominator"));
                                     }
                                     val = new ColorSpaceTransform(a);
+                                } else if (keyType == TonemapCurve.class) {
+                                    JSONObject obj = jsonReq.optJSONObject(keyName);
+                                    String names[] = {"red", "green", "blue"};
+                                    float[][] curves = new float[3][];
+                                    for (int ch = 0; ch < 3; ch++) {
+                                        JSONArray ja = obj.getJSONArray(names[ch]);
+                                        curves[ch] = new float[ja.length()];
+                                        for (int i = 0; i < ja.length(); i++) {
+                                            Array.set(curves[ch], i, (float)ja.getDouble(i));
+                                        }
+                                    }
+                                    val = new TonemapCurve(curves[0], curves[1], curves[2]);
                                 } else if (keyType instanceof ParameterizedType &&
                                         ((ParameterizedType)keyType).getRawType() == Range.class &&
                                         ((ParameterizedType)keyType).getActualTypeArguments().length == 1 &&
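
serializeLensShadingMap() above now emits a JSON object with explicit "width" and "height" fields plus a flattened "map" array instead of a bare array of gains. A minimal sketch of how a consumer could unpack that shape back into per-cell, per-channel gains, assuming the row/column/channel ordering used in the hunk; the class and method names are made up:

    import org.json.JSONArray;
    import org.json.JSONException;
    import org.json.JSONObject;

    final class LensShadingMapReader {
        // Hypothetical reader for the {"width", "height", "map"} object produced above.
        // Gains are flattened row-major with 4 channels (R, G_even, G_odd, B) per grid cell.
        static float[][][] read(JSONObject mapObj) throws JSONException {
            int width = mapObj.getInt("width");   // number of columns
            int height = mapObj.getInt("height"); // number of rows
            JSONArray flat = mapObj.getJSONArray("map");
            float[][][] gains = new float[height][width][4];
            int idx = 0;
            for (int row = 0; row < height; row++) {
                for (int col = 0; col < width; col++) {
                    for (int ch = 0; ch < 4; ch++) {
                        gains[row][col][ch] = (float) flat.getDouble(idx++);
                    }
                }
            }
            return gains;
        }
    }
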
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
index bd9ebda..fe1c0ed 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
@@ -16,6 +16,9 @@
 
 package com.android.cts.verifier.camera.its;
 
+import android.app.Notification;
+import android.app.NotificationChannel;
+import android.app.NotificationManager;
 import android.app.Service;
 import android.content.Context;
 import android.content.Intent;
@@ -32,6 +35,7 @@
 import android.hardware.camera2.TotalCaptureResult;
 import android.hardware.camera2.params.InputConfiguration;
 import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.params.OutputConfiguration;
 import android.hardware.Sensor;
 import android.hardware.SensorEvent;
 import android.hardware.SensorEventListener;
@@ -51,6 +55,7 @@
 import android.util.Log;
 import android.util.Rational;
 import android.util.Size;
+import android.util.SparseArray;
 import android.view.Surface;
 
 import com.android.ex.camera2.blocking.BlockingCameraManager;
@@ -59,6 +64,7 @@
 import com.android.ex.camera2.blocking.BlockingSessionCallback;
 
 import com.android.cts.verifier.camera.its.StatsImage;
+import com.android.cts.verifier.R;
 
 import org.json.JSONArray;
 import org.json.JSONObject;
@@ -80,8 +86,10 @@
 import java.security.MessageDigest;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.LinkedBlockingDeque;
@@ -93,6 +101,9 @@
 public class ItsService extends Service implements SensorEventListener {
     public static final String TAG = ItsService.class.getSimpleName();
 
+    private final int SERVICE_NOTIFICATION_ID = 37; // random int that is unique within app
+    private NotificationChannel mChannel;
+
     // Timeouts, in seconds.
     private static final int TIMEOUT_CALLBACK = 20;
     private static final int TIMEOUT_3A = 10;
@@ -135,6 +146,7 @@
     private CameraDevice mCamera = null;
     private CameraCaptureSession mSession = null;
     private ImageReader[] mOutputImageReaders = null;
+    private SparseArray<String> mPhysicalStreamMap = new SparseArray<String>();
     private ImageReader mInputImageReader = null;
     private CameraCharacteristics mCameraCharacteristics = null;
 
@@ -174,7 +186,8 @@
     private CaptureResult mCaptureResults[] = null;
 
     private volatile ConditionVariable mInterlock3A = new ConditionVariable(true);
-    private volatile boolean mIssuedRequest3A = false;
+
+    final Object m3AStateLock = new Object();
     private volatile boolean mConvergedAE = false;
     private volatile boolean mConvergedAF = false;
     private volatile boolean mConvergedAWB = false;
@@ -201,8 +214,11 @@
     private HandlerThread mSensorThread = null;
     private Handler mSensorHandler = null;
 
+    private static final int SERIALIZER_SURFACES_ID = 2;
+    private static final int SERIALIZER_PHYSICAL_METADATA_ID = 3;
+
     public interface CaptureCallback {
-        void onCaptureAvailable(Image capture);
+        void onCaptureAvailable(Image capture, String physicalCameraId);
     }
 
     public abstract class CaptureResultListener extends CameraCaptureSession.CaptureCallback {}
@@ -270,6 +286,15 @@
         } catch (ItsException e) {
             Logt.e(TAG, "Service failed to start: ", e);
         }
+
+        NotificationManager notificationManager =
+                (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
+        mChannel = new NotificationChannel(
+                "ItsServiceChannel", "ItsService", NotificationManager.IMPORTANCE_LOW);
+        // Configure the notification channel.
+        mChannel.setDescription("ItsServiceChannel");
+        mChannel.enableVibration(false);
+        notificationManager.createNotificationChannel(mChannel);
     }
 
     @Override
@@ -285,6 +310,13 @@
             } else {
                 Logt.e(TAG, "Starting ItsService in bad state");
             }
+
+            Notification notification = new Notification.Builder(this, mChannel.getId())
+                    .setContentTitle("CameraITS Service")
+                    .setContentText("CameraITS Service is running")
+                    .setSmallIcon(R.drawable.icon)
+                    .setOngoing(true).build();
+            startForeground(SERVICE_NOTIFICATION_ID, notification);
         } catch (java.lang.InterruptedException e) {
             Logt.e(TAG, "Error starting ItsService (interrupted)", e);
         }
@@ -314,17 +346,16 @@
         }
     }
 
-    public void openCameraDevice(int cameraId) throws ItsException {
-        Logt.i(TAG, String.format("Opening camera %d", cameraId));
+    public void openCameraDevice(String cameraId) throws ItsException {
+        Logt.i(TAG, String.format("Opening camera %s", cameraId));
 
-        String[] devices;
         try {
-            devices = mCameraManager.getCameraIdList();
-            if (devices == null || devices.length == 0) {
-                throw new ItsException("No camera devices");
-            }
             if (mMemoryQuota == -1) {
                 // Initialize memory quota on this device
+                List<String> devices = ItsUtils.getItsCompatibleCameraIds(mCameraManager);
+                if (devices.size() == 0) {
+                    throw new ItsException("No camera devices");
+                }
                 for (String camId : devices) {
                     CameraCharacteristics chars =  mCameraManager.getCameraCharacteristics(camId);
                     Size maxYuvSize = ItsUtils.getMaxOutputSize(
@@ -341,10 +372,8 @@
         }
 
         try {
-            mCamera = mBlockingCameraManager.openCamera(devices[cameraId],
-                    mCameraListener, mCameraHandler);
-            mCameraCharacteristics = mCameraManager.getCameraCharacteristics(
-                    devices[cameraId]);
+            mCamera = mBlockingCameraManager.openCamera(cameraId, mCameraListener, mCameraHandler);
+            mCameraCharacteristics = mCameraManager.getCameraCharacteristics(cameraId);
             mSocketQueueQuota = new Semaphore(mMemoryQuota, true);
         } catch (CameraAccessException e) {
             throw new ItsException("Failed to open camera", e);
@@ -395,9 +424,20 @@
                             jsonObj.put("captureResult", ItsSerializer.serialize(
                                     (CaptureResult)obj));
                         } else if (obj instanceof JSONArray) {
-                            jsonObj.put("outputs", (JSONArray)obj);
+                            if (tag == "captureResults") {
+                                if (i == SERIALIZER_SURFACES_ID) {
+                                    jsonObj.put("outputs", (JSONArray)obj);
+                                } else if (i == SERIALIZER_PHYSICAL_METADATA_ID) {
+                                    jsonObj.put("physicalResults", (JSONArray)obj);
+                                } else {
+                                    throw new ItsException(
+                                            "Unsupported JSONArray for captureResults");
+                                }
+                            } else {
+                                jsonObj.put("outputs", (JSONArray)obj);
+                            }
                         } else {
-                            throw new ItsException("Invalid object received for serialiation");
+                            throw new ItsException("Invalid object received for serialization");
                         }
                     }
                     if (tag == null) {
@@ -603,12 +643,14 @@
                 JSONObject cmdObj = new JSONObject(cmd);
                 Logt.i(TAG, "Start processing command" + cmdObj.getString("cmdName"));
                 if ("open".equals(cmdObj.getString("cmdName"))) {
-                    int cameraId = cmdObj.getInt("cameraId");
+                    String cameraId = cmdObj.getString("cameraId");
                     openCameraDevice(cameraId);
                 } else if ("close".equals(cmdObj.getString("cmdName"))) {
                     closeCameraDevice();
                 } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) {
                     doGetProps();
+                } else if ("getCameraPropertiesById".equals(cmdObj.getString("cmdName"))) {
+                    doGetPropsById(cmdObj);
                 } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) {
                     doStartSensorEvents();
                 } else if ("checkSensorExistence".equals(cmdObj.getString("cmdName"))) {
@@ -727,7 +769,7 @@
 
         public void sendResponseCaptureResult(CameraCharacteristics props,
                                               CaptureRequest request,
-                                              CaptureResult result,
+                                              TotalCaptureResult result,
                                               ImageReader[] readers)
                 throws ItsException {
             try {
@@ -765,12 +807,19 @@
                     jsonSurfaces.put(jsonSurface);
                 }
 
-                Object objs[] = new Object[5];
+                Map<String, CaptureResult> physicalMetadata =
+                        result.getPhysicalCameraResults();
+                JSONArray jsonPhysicalMetadata = new JSONArray();
+                for (Map.Entry<String, CaptureResult> pair : physicalMetadata.entrySet()) {
+                    JSONObject jsonOneMetadata = new JSONObject();
+                    jsonOneMetadata.put(pair.getKey(), ItsSerializer.serialize(pair.getValue()));
+                    jsonPhysicalMetadata.put(jsonOneMetadata);
+                }
+                Object objs[] = new Object[4];
                 objs[0] = "captureResults";
-                objs[1] = props;
-                objs[2] = request;
-                objs[3] = result;
-                objs[4] = jsonSurfaces;
+                objs[1] = result;
+                objs[SERIALIZER_SURFACES_ID] = jsonSurfaces;
+                objs[SERIALIZER_PHYSICAL_METADATA_ID] = jsonPhysicalMetadata;
                 mSerializerQueue.put(objs);
             } catch (org.json.JSONException e) {
                 throw new ItsException("JSON error: ", e);
@@ -788,7 +837,13 @@
                 Image i = null;
                 try {
                     i = reader.acquireNextImage();
-                    listener.onCaptureAvailable(i);
+                    String physicalCameraId = "";
+                    for (int idx = 0; idx < mOutputImageReaders.length; idx++) {
+                        if (mOutputImageReaders[idx] == reader) {
+                            physicalCameraId = mPhysicalStreamMap.get(idx);
+                        }
+                    }
+                    listener.onCaptureAvailable(i, physicalCameraId);
                 } finally {
                     if (i != null) {
                         i.close();
@@ -840,9 +895,12 @@
         mSocketRunnableObj.sendResponse(mCameraCharacteristics);
     }
 
-    private void doGetCameraIds() throws ItsException {
+    private void doGetPropsById(JSONObject params) throws ItsException {
         String[] devices;
         try {
+            // Intentionally not using ItsUtils.getItsCompatibleCameraIds here, so a simple
+            // script can query camera characteristics even for devices that are currently
+            // exempted from ITS.
             devices = mCameraManager.getCameraIdList();
             if (devices == null || devices.length == 0) {
                 throw new ItsException("No camera devices");
@@ -852,23 +910,38 @@
         }
 
         try {
+            String cameraId = params.getString("cameraId");
+            if (Arrays.asList(devices).contains(cameraId)) {
+                CameraCharacteristics characteristics =
+                        mCameraManager.getCameraCharacteristics(cameraId);
+                mSocketRunnableObj.sendResponse(characteristics);
+            } else {
+                Log.e(TAG, "Invalid camera ID: " + cameraId);
+                throw new ItsException("Invalid cameraId:" + cameraId);
+            }
+        } catch (org.json.JSONException e) {
+            throw new ItsException("JSON error: ", e);
+        } catch (CameraAccessException e) {
+            throw new ItsException("Access error: ", e);
+        }
+    }
+
+    private void doGetCameraIds() throws ItsException {
+        List<String> devices = ItsUtils.getItsCompatibleCameraIds(mCameraManager);
+        if (devices.size() == 0) {
+            throw new ItsException("No camera devices");
+        }
+
+        try {
             JSONObject obj = new JSONObject();
             JSONArray array = new JSONArray();
             for (String id : devices) {
-                CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(id);
-                // Only supply camera Id for non-legacy cameras since legacy camera does not
-                // support ITS
-                if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) !=
-                        CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
-                    array.put(id);
-                }
+                array.put(id);
             }
             obj.put("cameraIdArray", array);
             mSocketRunnableObj.sendResponse("cameraIds", obj);
         } catch (org.json.JSONException e) {
             throw new ItsException("JSON error: ", e);
-        } catch (android.hardware.camera2.CameraAccessException e) {
-            throw new ItsException("Access error: ", e);
         }
     }
 
@@ -912,6 +985,7 @@
     }
 
     private void do3A(JSONObject params) throws ItsException {
+        ThreeAResultListener threeAListener = new ThreeAResultListener();
         try {
             // Start a 3A action, and wait for it to converge.
             // Get the converged values for each "A", and package into JSON result for caller.
@@ -964,11 +1038,6 @@
                 }
             }
 
-            // If AE or AWB lock is specified, then the 3A will converge first and then lock these
-            // values, waiting until the HAL has reported that the lock was successful.
-            mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
-            mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);
-
             // An EV compensation can be specified as part of AE convergence.
             int evComp = params.optInt(EVCOMP_KEY, 0);
             if (evComp != 0) {
@@ -1000,12 +1069,17 @@
             }
 
             mInterlock3A.open();
-            mIssuedRequest3A = false;
-            mConvergedAE = false;
-            mConvergedAWB = false;
-            mConvergedAF = false;
-            mLockedAE = false;
-            mLockedAWB = false;
+            synchronized(m3AStateLock) {
+                // If AE or AWB lock is specified, then the 3A will converge first and then lock these
+                // values, waiting until the HAL has reported that the lock was successful.
+                mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
+                mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);
+                mConvergedAE = false;
+                mConvergedAWB = false;
+                mConvergedAF = false;
+                mLockedAE = false;
+                mLockedAWB = false;
+            }
             long tstart = System.currentTimeMillis();
             boolean triggeredAE = false;
             boolean triggeredAF = false;
@@ -1028,71 +1102,83 @@
                 }
                 mInterlock3A.close();
 
-                // If not converged yet, issue another capture request.
-                if (       (doAE && (!triggeredAE || !mConvergedAE))
-                        || !mConvergedAWB
-                        || (doAF && (!triggeredAF || !mConvergedAF))
-                        || (doAE && mNeedsLockedAE && !mLockedAE)
-                        || (mNeedsLockedAWB && !mLockedAWB)) {
+                synchronized(m3AStateLock) {
+                    // If not converged yet, issue another capture request.
+                    if (       (doAE && (!triggeredAE || !mConvergedAE))
+                            || !mConvergedAWB
+                            || (doAF && (!triggeredAF || !mConvergedAF))
+                            || (doAE && mNeedsLockedAE && !mLockedAE)
+                            || (mNeedsLockedAWB && !mLockedAWB)) {
 
-                    // Baseline capture request for 3A.
-                    CaptureRequest.Builder req = mCamera.createCaptureRequest(
-                            CameraDevice.TEMPLATE_PREVIEW);
-                    req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
-                    req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
-                    req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
-                            CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
-                    req.set(CaptureRequest.CONTROL_AE_MODE,
-                            CaptureRequest.CONTROL_AE_MODE_ON);
-                    req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
-                    req.set(CaptureRequest.CONTROL_AE_LOCK, false);
-                    req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
-                    req.set(CaptureRequest.CONTROL_AF_MODE,
-                            CaptureRequest.CONTROL_AF_MODE_AUTO);
-                    req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
-                    req.set(CaptureRequest.CONTROL_AWB_MODE,
-                            CaptureRequest.CONTROL_AWB_MODE_AUTO);
-                    req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
-                    req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);
-                    // ITS only turns OIS on when it's explicitly requested
-                    req.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
-                            CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
+                        // Baseline capture request for 3A.
+                        CaptureRequest.Builder req = mCamera.createCaptureRequest(
+                                CameraDevice.TEMPLATE_PREVIEW);
+                        req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+                        req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
+                        req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
+                                CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
+                        req.set(CaptureRequest.CONTROL_AE_MODE,
+                                CaptureRequest.CONTROL_AE_MODE_ON);
+                        req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
+                        req.set(CaptureRequest.CONTROL_AE_LOCK, false);
+                        req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
+                        req.set(CaptureRequest.CONTROL_AF_MODE,
+                                CaptureRequest.CONTROL_AF_MODE_AUTO);
+                        req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
+                        req.set(CaptureRequest.CONTROL_AWB_MODE,
+                                CaptureRequest.CONTROL_AWB_MODE_AUTO);
+                        req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
+                        req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);
+                        // ITS only turns OIS on when it's explicitly requested
+                        req.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
+                                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
 
-                    if (evComp != 0) {
-                        req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp);
+                        if (evComp != 0) {
+                            req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp);
+                        }
+
+                        if (mConvergedAE && mNeedsLockedAE) {
+                            req.set(CaptureRequest.CONTROL_AE_LOCK, true);
+                        }
+                        if (mConvergedAWB && mNeedsLockedAWB) {
+                            req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
+                        }
+
+                        boolean triggering = false;
+                        // Trigger AE first.
+                        if (doAE && !triggeredAE) {
+                            Logt.i(TAG, "Triggering AE");
+                            req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+                                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+                            triggeredAE = true;
+                            triggering = true;
+                        }
+
+                        // After AE has converged, trigger AF.
+                        if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
+                            Logt.i(TAG, "Triggering AF");
+                            req.set(CaptureRequest.CONTROL_AF_TRIGGER,
+                                    CaptureRequest.CONTROL_AF_TRIGGER_START);
+                            triggeredAF = true;
+                            triggering = true;
+                        }
+
+                        req.addTarget(mOutputImageReaders[0].getSurface());
+
+                        if (triggering) {
+                            // Send single request for AE/AF trigger
+                            mSession.capture(req.build(),
+                                    threeAListener, mResultHandler);
+                        } else {
+                            // Use repeating request for non-trigger requests
+                            mSession.setRepeatingRequest(req.build(),
+                                    threeAListener, mResultHandler);
+                        }
+                    } else {
+                        mSocketRunnableObj.sendResponse("3aConverged", "");
+                        Logt.i(TAG, "3A converged");
+                        break;
                     }
-
-                    if (mConvergedAE && mNeedsLockedAE) {
-                        req.set(CaptureRequest.CONTROL_AE_LOCK, true);
-                    }
-                    if (mConvergedAWB && mNeedsLockedAWB) {
-                        req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
-                    }
-
-                    // Trigger AE first.
-                    if (doAE && !triggeredAE) {
-                        Logt.i(TAG, "Triggering AE");
-                        req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
-                                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
-                        triggeredAE = true;
-                    }
-
-                    // After AE has converged, trigger AF.
-                    if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
-                        Logt.i(TAG, "Triggering AF");
-                        req.set(CaptureRequest.CONTROL_AF_TRIGGER,
-                                CaptureRequest.CONTROL_AF_TRIGGER_START);
-                        triggeredAF = true;
-                    }
-
-                    req.addTarget(mOutputImageReaders[0].getSurface());
-
-                    mIssuedRequest3A = true;
-                    mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
-                } else {
-                    mSocketRunnableObj.sendResponse("3aConverged", "");
-                    Logt.i(TAG, "3A converged");
-                    break;
                 }
             }
         } catch (android.hardware.camera2.CameraAccessException e) {
@@ -1101,6 +1187,11 @@
             throw new ItsException("JSON error: ", e);
         } finally {
             mSocketRunnableObj.sendResponse("3aDone", "");
+            // stop listener from updating 3A states
+            threeAListener.stop();
+            if (mSession != null) {
+                mSession.close();
+            }
         }
     }
 
@@ -1133,6 +1224,7 @@
         Size outputSizes[];
         int outputFormats[];
         int numSurfaces = 0;
+        mPhysicalStreamMap.clear();
 
         if (jsonOutputSpecs != null) {
             try {
@@ -1201,6 +1293,10 @@
                     if (height <= 0) {
                         height = ItsUtils.getMaxSize(sizes).getHeight();
                     }
+                    String physicalCameraId = surfaceObj.optString("physicalCamera");
+                    if (!physicalCameraId.isEmpty()) {
+                        mPhysicalStreamMap.put(i, physicalCameraId);
+                    }
 
                     // The stats computation only applies to the active array region.
                     int aaw = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics).width();
@@ -1253,7 +1349,8 @@
 
                 int newCount = mCountCallbacksRemaining.get();
                 if (newCount == currentCount) {
-                    throw new ItsException("No callback received within timeout");
+                    throw new ItsException("No callback received within timeout " +
+                            timeoutMs + "ms");
                 }
                 currentCount = newCount;
             }
@@ -1292,11 +1389,18 @@
                 numSurfaces = mOutputImageReaders.length;
                 numCaptureSurfaces = numSurfaces - (backgroundRequest ? 1 : 0);
 
-                List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
+                List<OutputConfiguration> outputConfigs =
+                        new ArrayList<OutputConfiguration>(numSurfaces);
                 for (int i = 0; i < numSurfaces; i++) {
-                    outputSurfaces.add(mOutputImageReaders[i].getSurface());
+                    OutputConfiguration config = new OutputConfiguration(
+                            mOutputImageReaders[i].getSurface());
+                    if (mPhysicalStreamMap.get(i) != null) {
+                        config.setPhysicalCameraId(mPhysicalStreamMap.get(i));
+                    }
+                    outputConfigs.add(config);
                 }
-                mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
+                mCamera.createCaptureSessionByOutputConfigurations(outputConfigs,
+                        sessionListener, mCameraHandler);
                 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
 
                 for (int i = 0; i < numSurfaces; i++) {
@@ -1561,7 +1665,7 @@
 
     private final CaptureCallback mCaptureCallback = new CaptureCallback() {
         @Override
-        public void onCaptureAvailable(Image capture) {
+        public void onCaptureAvailable(Image capture, String physicalCameraId) {
             try {
                 int format = capture.getFormat();
                 if (format == ImageFormat.JPEG) {
@@ -1574,20 +1678,21 @@
                     Logt.i(TAG, "Received YUV capture");
                     byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                     ByteBuffer buf = ByteBuffer.wrap(img);
-                    int count = mCountYuv.getAndIncrement();
-                    mSocketRunnableObj.sendResponseCaptureBuffer("yuvImage", buf);
+                    mSocketRunnableObj.sendResponseCaptureBuffer(
+                            "yuvImage"+physicalCameraId, buf);
                 } else if (format == ImageFormat.RAW10) {
                     Logt.i(TAG, "Received RAW10 capture");
                     byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                     ByteBuffer buf = ByteBuffer.wrap(img);
                     int count = mCountRaw10.getAndIncrement();
-                    mSocketRunnableObj.sendResponseCaptureBuffer("raw10Image", buf);
+                    mSocketRunnableObj.sendResponseCaptureBuffer(
+                            "raw10Image"+physicalCameraId, buf);
                 } else if (format == ImageFormat.RAW12) {
                     Logt.i(TAG, "Received RAW12 capture");
                     byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                     ByteBuffer buf = ByteBuffer.wrap(img);
                     int count = mCountRaw12.getAndIncrement();
-                    mSocketRunnableObj.sendResponseCaptureBuffer("raw12Image", buf);
+                    mSocketRunnableObj.sendResponseCaptureBuffer("raw12Image"+physicalCameraId, buf);
                 } else if (format == ImageFormat.RAW_SENSOR) {
                     Logt.i(TAG, "Received RAW16 capture");
                     int count = mCountRawOrDng.getAndIncrement();
@@ -1595,7 +1700,8 @@
                         byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                         if (! mCaptureRawIsStats) {
                             ByteBuffer buf = ByteBuffer.wrap(img);
-                            mSocketRunnableObj.sendResponseCaptureBuffer("rawImage", buf);
+                            mSocketRunnableObj.sendResponseCaptureBuffer(
+                                    "rawImage" + physicalCameraId, buf);
                         } else {
                             // Compute the requested stats on the raw frame, and return the results
                             // in a new "stats image".
@@ -1610,6 +1716,14 @@
                                               .left;
                             int aay = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics)
                                               .top;
+
+                            if (w == aaw) {
+                                aax = 0;
+                            }
+                            if (h == aah) {
+                                aay = 0;
+                            }
+
                             int gw = mCaptureStatsGridWidth;
                             int gh = mCaptureStatsGridHeight;
                             float[] stats = StatsImage.computeStatsImage(
@@ -1679,6 +1793,198 @@
         return (float)r.getNumerator() / (float)r.getDenominator();
     }
 
+    private String buildLogString(CaptureResult result) throws ItsException {
+        StringBuilder logMsg = new StringBuilder();
+        logMsg.append(String.format(
+                "Capt result: AE=%d, AF=%d, AWB=%d, ",
+                result.get(CaptureResult.CONTROL_AE_STATE),
+                result.get(CaptureResult.CONTROL_AF_STATE),
+                result.get(CaptureResult.CONTROL_AWB_STATE)));
+        int[] capabilities = mCameraCharacteristics.get(
+                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+        if (capabilities == null) {
+            throw new ItsException("Failed to get capabilities");
+        }
+        boolean readSensorSettings = false;
+        for (int capability : capabilities) {
+            if (capability ==
+                    CameraCharacteristics.
+                            REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS) {
+                readSensorSettings = true;
+                break;
+            }
+        }
+        if (readSensorSettings) {
+            logMsg.append(String.format(
+                    "sens=%d, exp=%.1fms, dur=%.1fms, ",
+                    result.get(CaptureResult.SENSOR_SENSITIVITY),
+                    result.get(CaptureResult.SENSOR_EXPOSURE_TIME).longValue() / 1000000.0f,
+                    result.get(CaptureResult.SENSOR_FRAME_DURATION).longValue() /
+                                1000000.0f));
+        }
+        if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) {
+            logMsg.append(String.format(
+                    "gains=[%.1f, %.1f, %.1f, %.1f], ",
+                    result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
+                    result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
+                    result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
+                    result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue()));
+        } else {
+            logMsg.append("gains=[], ");
+        }
+        if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
+            logMsg.append(String.format(
+                    "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ",
+                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)),
+                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)),
+                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)),
+                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)),
+                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)),
+                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)),
+                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)),
+                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)),
+                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2))));
+        } else {
+            logMsg.append("xform=[], ");
+        }
+        logMsg.append(String.format(
+                "foc=%.1f",
+                result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
+        return logMsg.toString();
+    }
+
+    private class ThreeAResultListener extends CaptureResultListener {
+        private volatile boolean stopped = false;
+        private boolean aeResultSent = false;
+        private boolean awbResultSent = false;
+        private boolean afResultSent = false;
+
+        @Override
+        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
+                long timestamp, long frameNumber) {
+        }
+
+        @Override
+        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
+                TotalCaptureResult result) {
+            try {
+                if (stopped) {
+                    return;
+                }
+
+                if (request == null || result == null) {
+                    throw new ItsException("Request/result is invalid");
+                }
+
+                Logt.i(TAG, buildLogString(result));
+
+                synchronized(m3AStateLock) {
+                    if (result.get(CaptureResult.CONTROL_AE_STATE) != null) {
+                        mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
+                                                  CaptureResult.CONTROL_AE_STATE_CONVERGED ||
+                                       result.get(CaptureResult.CONTROL_AE_STATE) ==
+                                                  CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED ||
+                                       result.get(CaptureResult.CONTROL_AE_STATE) ==
+                                                  CaptureResult.CONTROL_AE_STATE_LOCKED;
+                        mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
+                                               CaptureResult.CONTROL_AE_STATE_LOCKED;
+                    }
+                    if (result.get(CaptureResult.CONTROL_AF_STATE) != null) {
+                        mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) ==
+                                                  CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
+                    }
+                    if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) {
+                        mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
+                                                   CaptureResult.CONTROL_AWB_STATE_CONVERGED ||
+                                        result.get(CaptureResult.CONTROL_AWB_STATE) ==
+                                                   CaptureResult.CONTROL_AWB_STATE_LOCKED;
+                        mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
+                                                CaptureResult.CONTROL_AWB_STATE_LOCKED;
+                    }
+
+                    if (mConvergedAE && (!mNeedsLockedAE || mLockedAE) && !aeResultSent) {
+                        aeResultSent = true;
+                        if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null
+                                && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) {
+                            mSocketRunnableObj.sendResponse("aeResult", String.format("%d %d",
+                                    result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(),
+                                    result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue()
+                                    ));
+                        } else {
+                            Logt.i(TAG, String.format(
+                                    "AE converged but NULL exposure values, sensitivity:%b, expTime:%b",
+                                    result.get(CaptureResult.SENSOR_SENSITIVITY) == null,
+                                    result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null));
+                        }
+                    }
+
+                    if (mConvergedAF && !afResultSent) {
+                        afResultSent = true;
+                        if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) {
+                            mSocketRunnableObj.sendResponse("afResult", String.format("%f",
+                                    result.get(CaptureResult.LENS_FOCUS_DISTANCE)
+                                    ));
+                        } else {
+                            Logt.i(TAG, "AF converged but NULL focus distance values");
+                        }
+                    }
+
+                    if (mConvergedAWB && (!mNeedsLockedAWB || mLockedAWB) && !awbResultSent) {
+                        awbResultSent = true;
+                        if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null
+                                && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
+                            mSocketRunnableObj.sendResponse("awbResult", String.format(
+                                    "%f %f %f %f %f %f %f %f %f %f %f %f %f",
+                                    result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
+                                    result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
+                                    result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
+                                    result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue(),
+                                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).
+                                            getElement(0,0)),
+                                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).
+                                            getElement(1,0)),
+                                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).
+                                            getElement(2,0)),
+                                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).
+                                            getElement(0,1)),
+                                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).
+                                            getElement(1,1)),
+                                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).
+                                            getElement(2,1)),
+                                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).
+                                            getElement(0,2)),
+                                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).
+                                            getElement(1,2)),
+                                    r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).
+                                            getElement(2,2))));
+                        } else {
+                            Logt.i(TAG, String.format(
+                                    "AWB converged but NULL color correction values, gains:%b, ccm:%b",
+                                    result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null,
+                                    result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) == null));
+                        }
+                    }
+                }
+
+                mInterlock3A.open();
+            } catch (ItsException e) {
+                Logt.e(TAG, "Script error: ", e);
+            } catch (Exception e) {
+                Logt.e(TAG, "Script error: ", e);
+            }
+        }
+
+        @Override
+        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
+                CaptureFailure failure) {