Merge "Refactor notification tests to clean them up." into lmp-sprout-dev
diff --git a/apps/CameraITS/CameraITS.pdf b/apps/CameraITS/CameraITS.pdf
index 5e5fd29..0d10bae 100644
--- a/apps/CameraITS/CameraITS.pdf
+++ b/apps/CameraITS/CameraITS.pdf
Binary files differ
diff --git a/apps/CameraITS/build/envsetup.sh b/apps/CameraITS/build/envsetup.sh
index a95c445..6069341 100644
--- a/apps/CameraITS/build/envsetup.sh
+++ b/apps/CameraITS/build/envsetup.sh
@@ -29,7 +29,7 @@
 python -V 2>&1 | grep -q "Python 2.7" || \
     echo ">> Require python 2.7" >&2
 
-for M in numpy PIL Image matplotlib pylab
+for M in numpy PIL Image matplotlib pylab cv2 scipy.stats scipy.spatial
 do
     python -c "import $M" >/dev/null 2>&1 || \
         echo ">> Require Python $M module" >&2
diff --git a/apps/CameraITS/pymodules/its/device.py b/apps/CameraITS/pymodules/its/device.py
index 6f42051..beba0ae 100644
--- a/apps/CameraITS/pymodules/its/device.py
+++ b/apps/CameraITS/pymodules/its/device.py
@@ -260,7 +260,8 @@
                     regions_af=[[0,0,1,1,1]],
                     do_ae=True, do_awb=True, do_af=True,
                     lock_ae=False, lock_awb=False,
-                    get_results=False):
+                    get_results=False,
+                    ev_comp=0):
         """Perform a 3A operation on the device.
 
         Triggers some or all of AE, AWB, and AF, and returns once they have
@@ -278,6 +279,7 @@
             lock_ae: Request AE lock after convergence, and wait for it.
             lock_awb: Request AWB lock after convergence, and wait for it.
             get_results: Return the 3A results from this function.
+            ev_comp: An EV compensation value to use when running AE.
 
         Region format in args:
             Arguments are lists of weighted regions; each weighted region is a
@@ -307,6 +309,8 @@
             cmd["aeLock"] = True
         if lock_awb:
             cmd["awbLock"] = True
+        if ev_comp != 0:
+            cmd["evComp"] = ev_comp
         self.sock.send(json.dumps(cmd) + "\n")
 
         # Wait for each specified 3A to converge.
diff --git a/apps/CameraITS/pymodules/its/image.py b/apps/CameraITS/pymodules/its/image.py
index f2425e1..b3bdb65 100644
--- a/apps/CameraITS/pymodules/its/image.py
+++ b/apps/CameraITS/pymodules/its/image.py
@@ -540,9 +540,13 @@
     img = numpy.vstack(chs).T.reshape(h/f,w/f,chans)
     return img
 
-def __measure_color_checker_patch(img, xc,yc, patch_size):
+def __get_color_checker_patch(img, xc,yc, patch_size):
     r = patch_size/2
-    tile = img[yc-r:yc+r+1:, xc-r:xc+r+1:, ::]
+    tile = img[yc-r:yc+r:, xc-r:xc+r:, ::]
+    return tile
+
+def __measure_color_checker_patch(img, xc,yc, patch_size):
+    tile = __get_color_checker_patch(img, xc,yc, patch_size)
     means = tile.mean(1).mean(0)
     return means
 
@@ -561,15 +565,9 @@
     * Standard color checker chart with standard-sized black borders.
 
     The values returned are in the coordinate system of the chart; that is,
-    the "origin" patch is the brown patch that is in the chart's top-left
-    corner when it is in the normal upright/horizontal orientation. (The chart
-    may be any of the four main orientations in the image.)
-
-    The chart is 6x4 patches in the normal upright orientation. The return
-    values of this function are the center coordinate of the top-left patch,
-    and the displacement vectors to the next patches to the right and below
-    the top-left patch. From these pieces of data, the center coordinates of
-    any of the patches can be computed.
+    patch (0,0) is the brown patch that is in the chart's top-left corner when
+    it is in the normal upright/horizontal orientation. (The chart may be any
+    of the four main orientations in the image.)
 
     Args:
         img: Input image, as a numpy array with pixels in [0,1].
@@ -677,6 +675,7 @@
             patches[yi].append((xc,yc))
 
     # Sanity check: test that the R,G,B,black,white patches are correct.
+    sanity_failed = False
     patch_info = [(2,2,[0]), # Red
                   (2,1,[1]), # Green
                   (2,0,[2]), # Blue
@@ -689,16 +688,19 @@
         means = __measure_color_checker_patch(img, xc,yc, 64)
         if (min([means[i] for i in high_chans]+[1]) < \
                 max([means[i] for i in low_chans]+[0])):
-            print "Color patch sanity check failed: patch", i
-            # If the debug info is requested, then don't assert that the patches
-            # are matched, to allow the caller to see the output.
-            if debug_fname_prefix is None:
-                assert(0)
+            sanity_failed = True
 
     if debug_fname_prefix is not None:
-        for (xc,yc) in sum(patches,[]):
-            img[yc,xc] = 1.0
-        write_image(img, debug_fname_prefix+"_2.jpg")
+        gridimg = numpy.zeros([4*(32+2), 6*(32+2), 3])
+        for yi in range(4):
+            for xi in range(6):
+                xc,yc = patches[yi][xi]
+                tile = __get_color_checker_patch(img, xc,yc, 32)
+                gridimg[yi*(32+2)+1:yi*(32+2)+1+32,
+                        xi*(32+2)+1:xi*(32+2)+1+32, :] = tile
+        write_image(gridimg, debug_fname_prefix+"_2.png")
+
+    assert(not sanity_failed)
 
     return patches
 
diff --git a/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf b/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf
new file mode 100644
index 0000000..01389fa
--- /dev/null
+++ b/apps/CameraITS/tests/dng_noise_model/DngNoiseModel.pdf
Binary files differ
diff --git a/apps/CameraITS/tools/compute_dng_noise_model.py b/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
similarity index 86%
rename from apps/CameraITS/tools/compute_dng_noise_model.py
rename to apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
index 1b57754..19b6c92 100644
--- a/apps/CameraITS/tools/compute_dng_noise_model.py
+++ b/apps/CameraITS/tests/dng_noise_model/dng_noise_model.py
@@ -50,7 +50,7 @@
         s_e_prod *= 2
 
         # Capture raw frames across the full sensitivity range.
-        NUM_SENS_STEPS = 15
+        NUM_SENS_STEPS = 9
         sens_step = int((sens_max - sens_min - 1) / float(NUM_SENS_STEPS))
         reqs = []
         sens = []
@@ -75,7 +75,7 @@
         patches = [(2*x,2*y) for (x,y) in sum(patches,[])]
 
         lines = []
-        for (s,cap) in zip(sens,caps):
+        for iouter, (s,cap) in enumerate(zip(sens,caps)):
             # For each capture, compute the mean value in each patch, for each
             # Bayer plane; discard patches where pixels are close to clamped.
             # Also compute the variance.
@@ -117,10 +117,17 @@
             #assert(m > 0)
             #assert(b >= 0)
 
-            # Draw a plot.
-            pylab.plot(xs, ys, 'r')
-            pylab.plot([0,xs[-1]],[b,m*xs[-1]+b],'b')
-            matplotlib.pyplot.savefig("%s_plot_mean_vs_variance.png" % (NAME))
+            if iouter == 0:
+                pylab.plot(xs, ys, 'r', label="Measured")
+                pylab.plot([0,xs[-1]],[b,m*xs[-1]+b],'b', label="Fit")
+            else:
+                pylab.plot(xs, ys, 'r')
+                pylab.plot([0,xs[-1]],[b,m*xs[-1]+b],'b')
+
+        pylab.xlabel("Mean")
+        pylab.ylabel("Variance")
+        pylab.legend()
+        matplotlib.pyplot.savefig("%s_plot_mean_vs_variance.png" % (NAME))
 
         # Now fit a line across the (m,b) line parameters for each sensitivity.
         # The gradient (m) params are fit to the "S" line, and the offset (b)
@@ -132,11 +139,16 @@
         mO,bO = numpy.polyfit(gains, Os, 1)
 
         # Plot curve "O" as 10x, so it fits in the same scale as curve "S".
-        pylab.plot(gains, [10*o for o in Os], 'r')
+        fig = matplotlib.pyplot.figure()
+        pylab.plot(gains, [10*o for o in Os], 'r', label="Measured")
         pylab.plot([gains[0],gains[-1]],
-                [10*mO*gains[0]+10*bO, 10*mO*gains[-1]+10*bO], 'b')
-        pylab.plot(gains, Ss, 'r')
-        pylab.plot([gains[0],gains[-1]], [mS*gains[0]+bS, mS*gains[-1]+bS], 'b')
+                [10*mO*gains[0]+10*bO, 10*mO*gains[-1]+10*bO],'r--',label="Fit")
+        pylab.plot(gains, Ss, 'b', label="Measured")
+        pylab.plot([gains[0],gains[-1]], [mS*gains[0]+bS,mS*gains[-1]+bS],'b--',
+                label="Fit")
+        pylab.xlabel("Sensitivity")
+        pylab.ylabel("Model parameter: S (blue), O x10 (red)")
+        pylab.legend()
         matplotlib.pyplot.savefig("%s_plot_S_O.png" % (NAME))
 
         print """
diff --git a/apps/CameraITS/tests/inprog/test_ev_compensation.py b/apps/CameraITS/tests/inprog/test_ev_compensation.py
deleted file mode 100644
index f9b0cd3..0000000
--- a/apps/CameraITS/tests/inprog/test_ev_compensation.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2014 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import its.image
-import its.device
-import its.objects
-import os.path
-import pylab
-import matplotlib
-import matplotlib.pyplot
-import numpy
-
-def main():
-    """Tests that EV compensation is applied.
-    """
-    NAME = os.path.basename(__file__).split(".")[0]
-
-    MAX_LUMA_DELTA_THRESH = 0.01
-    AVG_LUMA_DELTA_THRESH = 0.001
-
-    with its.device.ItsSession() as cam:
-        props = cam.get_camera_properties()
-        cam.do_3a()
-
-        # Capture auto shots, but with a linear tonemap.
-        req = its.objects.auto_capture_request()
-        req["android.tonemap.mode"] = 0
-        req["android.tonemap.curveRed"] = (0.0, 0.0, 1.0, 1.0)
-        req["android.tonemap.curveGreen"] = (0.0, 0.0, 1.0, 1.0)
-        req["android.tonemap.curveBlue"] = (0.0, 0.0, 1.0, 1.0)
-
-        evs = range(-4,5)
-        lumas = []
-        for ev in evs:
-            req['android.control.aeExposureCompensation'] = ev
-            cap = cam.do_capture(req)
-            y = its.image.convert_capture_to_planes(cap)[0]
-            tile = its.image.get_image_patch(y, 0.45,0.45,0.1,0.1)
-            lumas.append(its.image.compute_image_means(tile)[0])
-
-        ev_step_size_in_stops = its.objects.rational_to_float(
-                props['android.control.aeCompensationStep'])
-        luma_increase_per_step = pow(2, ev_step_size_in_stops)
-        expected_lumas = [lumas[0] * pow(luma_increase_per_step, i) \
-                for i in range(len(evs))]
-
-        pylab.plot(evs, lumas, 'r')
-        pylab.plot(evs, expected_lumas, 'b')
-        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
-
-        luma_diffs = [expected_lumas[i] - lumas[i] for i in range(len(evs))]
-        max_diff = max(luma_diffs)
-        avg_diff = sum(luma_diffs) / len(luma_diffs)
-        print "Max delta between modeled and measured lumas:", max_diff
-        print "Avg delta between modeled and measured lumas:", avg_diff
-        assert(max_diff < MAX_LUMA_DELTA_THRESH)
-        assert(avg_diff < AVG_LUMA_DELTA_THRESH)
-
-if __name__ == '__main__':
-    main()
diff --git a/apps/CameraITS/tests/scene1/test_dng_noise_model.py b/apps/CameraITS/tests/scene1/test_dng_noise_model.py
new file mode 100644
index 0000000..51270b6
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_dng_noise_model.py
@@ -0,0 +1,114 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.device
+import its.caps
+import its.objects
+import its.image
+import os.path
+import pylab
+import matplotlib
+import matplotlib.pyplot
+
+def main():
+    """Verify that the DNG raw model parameters are correct.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    NUM_STEPS = 4
+
+    # Pass if the difference between expected and computed variances is small,
+    # defined as being within an absolute variance delta of 0.0005, or within
+    # 20% of the expected variance, whichever is larger; this is to allow the
+    # test to pass in the presence of some randomness (since this test is
+    # measuring noise of a small patch) and some imperfect scene conditions
+    # (since ITS doesn't require a perfectly uniformly lit scene).
+    DIFF_THRESH = 0.0005
+    FRAC_THRESH = 0.2
+
+    with its.device.ItsSession() as cam:
+
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.raw(props) and
+                             its.caps.raw16(props) and
+                             its.caps.manual_sensor(props) and
+                             its.caps.read_3a(props) and
+                             its.caps.per_frame_control(props))
+
+        white_level = float(props['android.sensor.info.whiteLevel'])
+        black_levels = props['android.sensor.blackLevelPattern']
+        cfa_idxs = its.image.get_canonical_cfa_order(props)
+        black_levels = [black_levels[i] for i in cfa_idxs]
+
+        # Expose for the scene with min sensitivity
+        sens_min, sens_max = props['android.sensor.info.sensitivityRange']
+        sens_step = (sens_max - sens_min) / NUM_STEPS
+        s_ae,e_ae,_,_,_  = cam.do_3a(get_results=True)
+        s_e_prod = s_ae * e_ae
+        sensitivities = range(sens_min, sens_max, sens_step)
+
+        var_expected = [[],[],[],[]]
+        var_measured = [[],[],[],[]]
+        for sens in sensitivities:
+
+            # Capture a raw frame with the desired sensitivity.
+            exp = int(s_e_prod / float(sens))
+            req = its.objects.manual_capture_request(sens, exp)
+            cap = cam.do_capture(req, cam.CAP_RAW)
+
+            # Test each raw color channel (R, GR, GB, B):
+            noise_profile = cap["metadata"]["android.sensor.noiseProfile"]
+            assert((len(noise_profile)) == 4)
+            for ch in range(4):
+                # Get the noise model parameters for this channel of this shot.
+                s,o = noise_profile[cfa_idxs[ch]]
+
+                # Get a center tile of the raw channel, and compute the mean.
+                # Use a very small patch to ensure gross uniformity (i.e. so
+                # non-uniform lighting or vignetting doesn't affect the variance
+                # calculation).
+                plane = its.image.convert_capture_to_planes(cap, props)[ch]
+                plane = (plane * white_level - black_levels[ch]) / (
+                        white_level - black_levels[ch])
+                tile = its.image.get_image_patch(plane, 0.49,0.49,0.02,0.02)
+                mean = tile.mean()
+
+                # Calculate the expected variance based on the model, and the
+                # measured variance from the tile.
+                var_measured[ch].append(
+                        its.image.compute_image_variances(tile)[0])
+                var_expected[ch].append(s * mean + o)
+
+    for ch in range(4):
+        pylab.plot(sensitivities, var_expected[ch], "rgkb"[ch],
+                label=["R","GR","GB","B"][ch]+" expected")
+        pylab.plot(sensitivities, var_measured[ch], "rgkb"[ch]+"--",
+                label=["R", "GR", "GB", "B"][ch]+" measured")
+    pylab.xlabel("Sensitivity")
+    pylab.ylabel("Center patch variance")
+    pylab.legend(loc=2)
+    matplotlib.pyplot.savefig("%s_plot.png" % (NAME))
+
+    # Pass/fail check.
+    for ch in range(4):
+        diffs = [var_measured[ch][i] - var_expected[ch][i]
+                 for i in range(NUM_STEPS)]
+        print "Diffs (%s):"%(["R","GR","GB","B"][ch]), diffs
+        for i,diff in enumerate(diffs):
+            thresh = max(DIFF_THRESH, FRAC_THRESH * var_expected[ch][i])
+            assert(diff <= thresh)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py b/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
new file mode 100644
index 0000000..6341c67
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_ev_compensation_advanced.py
@@ -0,0 +1,83 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.caps
+import its.objects
+import os.path
+import pylab
+import matplotlib
+import matplotlib.pyplot
+import numpy
+
+def main():
+    """Tests that EV compensation is applied.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    MAX_LUMA_DELTA_THRESH = 0.02
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+        its.caps.skip_unless(its.caps.manual_sensor(props) and
+                             its.caps.manual_post_proc(props) and
+                             its.caps.per_frame_control(props))
+
+        evs = range(-4,5)
+        lumas = []
+        for ev in evs:
+            # Re-converge 3A, and lock AE once converged. Skip the AF trigger,
+            # as a dark/bright scene could make AF convergence fail, and this
+            # test doesn't care about image sharpness.
+            cam.do_3a(ev_comp=ev, lock_ae=True, do_af=False)
+
+            # Capture a single shot with the same EV comp and locked AE.
+            req = its.objects.auto_capture_request()
+            req['android.control.aeExposureCompensation'] = ev
+            req["android.control.aeLock"] = True
+            # Use linear tone curve to avoid brightness being impacted
+            # by tone curves.
+            req["android.tonemap.mode"] = 0
+            req["android.tonemap.curveRed"] = [0.0,0.0, 1.0,1.0]
+            req["android.tonemap.curveGreen"] = [0.0,0.0, 1.0,1.0]
+            req["android.tonemap.curveBlue"] = [0.0,0.0, 1.0,1.0]
+            cap = cam.do_capture(req)
+            y = its.image.convert_capture_to_planes(cap)[0]
+            tile = its.image.get_image_patch(y, 0.45,0.45,0.1,0.1)
+            lumas.append(its.image.compute_image_means(tile)[0])
+
+        ev_step_size_in_stops = its.objects.rational_to_float(
+                props['android.control.aeCompensationStep'])
+        luma_increase_per_step = pow(2, ev_step_size_in_stops)
+        print "ev_step_size_in_stops", ev_step_size_in_stops
+        imid = len(lumas) / 2
+        expected_lumas = [lumas[imid] / pow(luma_increase_per_step, i)
+                          for i in range(imid , 0, -1)]  + \
+                         [lumas[imid] * pow(luma_increase_per_step, i-imid)
+                          for i in range(imid, len(evs))]
+
+        pylab.plot(evs, lumas, 'r')
+        pylab.plot(evs, expected_lumas, 'b')
+        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+        luma_diffs = [expected_lumas[i] - lumas[i] for i in range(len(evs))]
+        max_diff = max(abs(i) for i in luma_diffs)
+        avg_diff = abs(numpy.array(luma_diffs)).mean()
+        print "Max delta between modeled and measured lumas:", max_diff
+        print "Avg delta between modeled and measured lumas:", avg_diff
+        assert(max_diff < MAX_LUMA_DELTA_THRESH)
+
+if __name__ == '__main__':
+    main()
diff --git a/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py b/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
new file mode 100644
index 0000000..13f318f
--- /dev/null
+++ b/apps/CameraITS/tests/scene1/test_ev_compensation_basic.py
@@ -0,0 +1,60 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import os.path
+import pylab
+import matplotlib
+import matplotlib.pyplot
+import numpy
+
+def main():
+    """Tests that EV compensation is applied.
+    """
+    NAME = os.path.basename(__file__).split(".")[0]
+
+    with its.device.ItsSession() as cam:
+        props = cam.get_camera_properties()
+
+        evs = range(-4,5)
+        lumas = []
+        for ev in evs:
+            # Re-converge 3A, and lock AE once converged. Skip the AF trigger,
+            # as a dark/bright scene could make AF convergence fail, and this
+            # test doesn't care about image sharpness.
+            cam.do_3a(ev_comp=ev, lock_ae=True, do_af=False)
+
+            # Capture a single shot with the same EV comp and locked AE.
+            req = its.objects.auto_capture_request()
+            req['android.control.aeExposureCompensation'] = ev
+            req["android.control.aeLock"] = True
+            cap = cam.do_capture(req)
+            y = its.image.convert_capture_to_planes(cap)[0]
+            tile = its.image.get_image_patch(y, 0.45,0.45,0.1,0.1)
+            lumas.append(its.image.compute_image_means(tile)[0])
+
+        pylab.plot(evs, lumas, 'r')
+        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))
+
+        luma_diffs = numpy.diff(lumas)
+        min_luma_diffs = min(luma_diffs)
+        print "Min of the luma value difference between adjacent ev comp: ", \
+                min_luma_diffs
+        # All luma brightness should be increasing with increasing ev comp.
+        assert(min_luma_diffs > 0)
+
+if __name__ == '__main__':
+    main()
diff --git a/apps/CameraITS/tests/sensor_fusion/SensorFusion.pdf b/apps/CameraITS/tests/sensor_fusion/SensorFusion.pdf
new file mode 100644
index 0000000..2e390c7
--- /dev/null
+++ b/apps/CameraITS/tests/sensor_fusion/SensorFusion.pdf
Binary files differ
diff --git a/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py b/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py
new file mode 100644
index 0000000..49f47a9
--- /dev/null
+++ b/apps/CameraITS/tests/sensor_fusion/test_sensor_fusion.py
@@ -0,0 +1,377 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import its.image
+import its.device
+import its.objects
+import time
+import math
+import pylab
+import os.path
+import matplotlib
+import matplotlib.pyplot
+import json
+import Image
+import numpy
+import cv2
+import bisect
+import scipy.spatial
+import sys
+
+NAME = os.path.basename(__file__).split(".")[0]
+
+# Capture 210 QVGA frames (which is 7s at 30fps)
+N = 210
+W,H = 320,240
+
+FEATURE_PARAMS = dict( maxCorners = 50,
+                       qualityLevel = 0.3,
+                       minDistance = 7,
+                       blockSize = 7 )
+
+LK_PARAMS = dict( winSize  = (15, 15),
+                  maxLevel = 2,
+                  criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,
+                        10, 0.03))
+
+# Constants to convert between different time units (for clarity).
+SEC_TO_NSEC = 1000*1000*1000.0
+SEC_TO_MSEC = 1000.0
+MSEC_TO_NSEC = 1000*1000.0
+MSEC_TO_SEC = 1/1000.0
+NSEC_TO_SEC = 1/(1000*1000*1000.0)
+NSEC_TO_MSEC = 1/(1000*1000.0)
+
+# Pass/fail thresholds.
+THRESH_MAX_CORR_DIST = 0.005
+THRESH_MAX_SHIFT_MS = 2
+THRESH_MIN_ROT = 0.001
+
+def main():
+    """Test if image and motion sensor events are well synchronized.
+
+    The instructions for running this test are in the SensorFusion.pdf file in
+    the same directory as this test.
+
+    The command-line argument "replay" may be optionally provided. Without this
+    argument, the test will collect a new set of camera+gyro data from the
+    device and then analyze it (and it will also dump this data to files in the
+    current directory). If the "replay" argument is provided, then the script
+    will instead load the dumped data from a previous run and analyze that
+    instead. This can be helpful for developers who are digging for additional
+    information on their measurements.
+    """
+
+    # Collect or load the camera+gyro data. All gyro events as well as camera
+    # timestamps are in the "events" dictionary, and "frames" is a list of
+    # RGB images as numpy arrays.
+    if "replay" not in sys.argv:
+        events, frames = collect_data()
+    else:
+        events, frames = load_data()
+
+    # Compute the camera rotation displacements (rad) between each pair of
+    # adjacent frames.
+    cam_times = get_cam_times(events["cam"])
+    cam_rots = get_cam_rotations(frames)
+    if max(abs(cam_rots)) < THRESH_MIN_ROT:
+        print "Device wasn't moved enough"
+        assert(0)
+
+    # Find the best offset (time-shift) to align the gyro and camera motion
+    # traces; this function integrates the shifted gyro data between camera
+    # samples for a range of candidate shift values, and returns the shift that
+    # result in the best correlation.
+    offset = get_best_alignment_offset(cam_times, cam_rots, events["gyro"])
+
+    # Plot the camera and gyro traces after applying the best shift.
+    cam_times = cam_times + offset*SEC_TO_NSEC
+    gyro_rots = get_gyro_rotations(events["gyro"], cam_times)
+    plot_rotations(cam_rots, gyro_rots)
+
+    # Pass/fail based on the offset and also the correlation distance.
+    dist = scipy.spatial.distance.correlation(cam_rots,gyro_rots)
+    print "Best correlation of %f at shift of %.2fms"%(dist, offset*SEC_TO_MSEC)
+    assert(dist < THRESH_MAX_CORR_DIST)
+    assert(abs(offset) < THRESH_MAX_SHIFT_MS*MSEC_TO_SEC)
+
+def get_best_alignment_offset(cam_times, cam_rots, gyro_events):
+    """Find the best offset to align the camera and gyro traces.
+
+    Uses a correlation distance metric between the curves, where a smaller
+    value means that the curves are better-correlated.
+
+    Args:
+        cam_times: Array of N camera times, one for each frame.
+        cam_rots: Array of N-1 camera rotation displacements (rad).
+        gyro_events: List of gyro event objects.
+
+    Returns:
+        Offset (seconds) of the best alignment.
+    """
+    # Measure the corr. dist. over a shift of up to +/- 100ms (1ms step size).
+    # Get the shift corresponding to the best (lowest) score.
+    candidates = range(-100,101)
+    dists = []
+    for shift in candidates:
+        times = cam_times + shift*MSEC_TO_NSEC
+        gyro_rots = get_gyro_rotations(gyro_events, times)
+        dists.append(scipy.spatial.distance.correlation(cam_rots,gyro_rots))
+    best_corr_dist = min(dists)
+    best_shift = candidates[dists.index(best_corr_dist)]
+
+    # Fit a curve to the corr. dist. data to measure the minima more
+    # accurately, by looking at the correlation distances within a range of
+    # +/- 20ms from the measured best score; note that this will use fewer
+    # than the full +/- 20 range for the curve fit if the measured score
+    # (which is used as the center of the fit) is within 20ms of the edge of
+    # the +/- 100ms candidate range.
+    i = len(dists)/2 + best_shift
+    candidates = candidates[i-20:i+21]
+    dists = dists[i-20:i+21]
+    a,b,c = numpy.polyfit(candidates, dists, 2)
+    exact_best_shift = -b/(2*a)
+    if abs(best_shift - exact_best_shift) > 2.0 or a <= 0 or c <= 0:
+        print "Test failed; bad fit to time-shift curve"
+        assert(0)
+
+    xfit = [x/10.0 for x in xrange(candidates[0]*10,candidates[-1]*10)]
+    yfit = [a*x*x+b*x+c for x in xfit]
+    fig = matplotlib.pyplot.figure()
+    pylab.plot(candidates, dists, 'r', label="data")
+    pylab.plot(xfit, yfit, 'b', label="fit")
+    pylab.plot([exact_best_shift+x for x in [-0.1,0,0.1]], [0,0.01,0], 'b')
+    pylab.xlabel("Relative horizontal shift between curves (ms)")
+    pylab.ylabel("Correlation distance")
+    pylab.legend()
+    matplotlib.pyplot.savefig("%s_plot_shifts.png" % (NAME))
+
+    return exact_best_shift * MSEC_TO_SEC
+
+def plot_rotations(cam_rots, gyro_rots):
+    """Save a plot of the camera vs. gyro rotational measurements.
+
+    Args:
+        cam_rots: Array of N-1 camera rotation measurements (rad).
+        gyro_rots: Array of N-1 gyro rotation measurements (rad).
+    """
+    # For the plot, scale the rotations to be in degrees.
+    fig = matplotlib.pyplot.figure()
+    cam_rots = cam_rots * (360/(2*math.pi))
+    gyro_rots = gyro_rots * (360/(2*math.pi))
+    pylab.plot(range(len(cam_rots)), cam_rots, 'r', label="camera")
+    pylab.plot(range(len(gyro_rots)), gyro_rots, 'b', label="gyro")
+    pylab.legend()
+    pylab.xlabel("Camera frame number")
+    pylab.ylabel("Angular displacement between adjacent camera frames (deg)")
+    pylab.xlim([0, len(cam_rots)])
+    matplotlib.pyplot.savefig("%s_plot.png" % (NAME))
+
+def get_gyro_rotations(gyro_events, cam_times):
+    """Get the rotation values of the gyro.
+
+    Integrates the gyro data between each camera frame to compute an angular
+    displacement. Uses simple Euler approximation to implement the
+    integration.
+
+    Args:
+        gyro_events: List of gyro event objects.
+        cam_times: Array of N camera times, one for each frame.
+
+    Returns:
+        Array of N-1 gyro rotation measurements (rad).
+    """
+    all_times = numpy.array([e["time"] for e in gyro_events])
+    all_rots = numpy.array([e["z"] for e in gyro_events])
+    gyro_rots = []
+    # Integrate the gyro data between each pair of camera frame times.
+    for icam in range(len(cam_times)-1):
+        # Get the window of gyro samples within the current pair of frames.
+        tcam0 = cam_times[icam]
+        tcam1 = cam_times[icam+1]
+        igyrowindow0 = bisect.bisect(all_times, tcam0)
+        igyrowindow1 = bisect.bisect(all_times, tcam1)
+        sgyro = 0
+        # Integrate samples within the window.
+        for igyro in range(igyrowindow0, igyrowindow1):
+            vgyro0 = all_rots[igyro]
+            vgyro1 = all_rots[igyro+1]
+            tgyro0 = all_times[igyro]
+            tgyro1 = all_times[igyro+1]
+            vgyro = 0.5 * (vgyro0 + vgyro1)
+            deltatgyro = (tgyro1 - tgyro0) * NSEC_TO_SEC
+            sgyro += vgyro * deltatgyro
+        # Handle the fractional intervals at the sides of the window.
+        for side,igyro in enumerate([igyrowindow0-1, igyrowindow1]):
+            vgyro0 = all_rots[igyro]
+            vgyro1 = all_rots[igyro+1]
+            tgyro0 = all_times[igyro]
+            tgyro1 = all_times[igyro+1]
+            vgyro = 0.5 * (vgyro0 + vgyro1)
+            deltatgyro = (tgyro1 - tgyro0) * NSEC_TO_SEC
+            if side == 0:
+                f = (tcam0 - tgyro0) / (tgyro1 - tgyro0)
+                sgyro += vgyro * deltatgyro * (1.0 - f)
+            else:
+                f = (tcam1 - tgyro0) / (tgyro1 - tgyro0)
+                sgyro += vgyro * deltatgyro * f
+        gyro_rots.append(sgyro)
+    gyro_rots = numpy.array(gyro_rots)
+    return gyro_rots
+
+def get_cam_rotations(frames):
+    """Get the rotations of the camera between each pair of frames.
+
+    Takes N frames and returns N-1 angular displacements corresponding to the
+    rotations between adjacent pairs of frames, in radians.
+
+    Args:
+        frames: List of N images (as RGB numpy arrays).
+
+    Returns:
+        Array of N-1 camera rotation measurements (rad).
+    """
+    gframes = []
+    for frame in frames:
+        frame = (frame * 255.0).astype(numpy.uint8)
+        gframes.append(cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY))
+    rots = []
+    for i in range(1,len(gframes)):
+        gframe0 = gframes[i-1]
+        gframe1 = gframes[i]
+        p0 = cv2.goodFeaturesToTrack(gframe0, mask=None, **FEATURE_PARAMS)
+        p1,st,_ = cv2.calcOpticalFlowPyrLK(gframe0, gframe1, p0, None,
+                **LK_PARAMS)
+        tform = procrustes_rotation(p0[st==1], p1[st==1])
+        # TODO: Choose the sign for the rotation so the cam matches the gyro
+        rot = -math.atan2(tform[0, 1], tform[0, 0])
+        rots.append(rot)
+        if i == 1:
+            # Save a debug visualization of the features that are being
+            # tracked in the first frame.
+            frame = frames[i]
+            for x,y in p0[st==1]:
+                cv2.circle(frame, (x,y), 3, (100,100,255), -1)
+            its.image.write_image(frame, "%s_features.jpg"%(NAME))
+    return numpy.array(rots)
+
+def get_cam_times(cam_events):
+    """Get the camera frame times.
+
+    Args:
+        cam_events: List of (start_exposure, exposure_time, readout_duration)
+            tuples, one per captured frame, with times in nanoseconds.
+
+    Returns:
+        frame_times: Array of N times, one corresponding to the "middle" of
+            the exposure of each frame.
+    """
+    # Assign a time to each frame that assumes that the image is instantly
+    # captured in the middle of its exposure.
+    starts = numpy.array([start for start,exptime,readout in cam_events])
+    exptimes = numpy.array([exptime for start,exptime,readout in cam_events])
+    readouts = numpy.array([readout for start,exptime,readout in cam_events])
+    frame_times = starts + (exptimes + readouts) / 2.0
+    return frame_times
+
+def load_data():
+    """Load a set of previously captured data.
+
+    Returns:
+        events: Dictionary containing all gyro events and cam timestamps.
+        frames: List of RGB images as numpy arrays.
+    """
+    with open("%s_events.txt"%(NAME), "r") as f:
+        events = json.loads(f.read())
+    n = len(events["cam"])
+    frames = []
+    for i in range(n):
+        img = Image.open("%s_frame%03d.jpg"%(NAME,i))
+        w,h = img.size[0:2]
+        frames.append(numpy.array(img).reshape(h,w,3) / 255.0)
+    return events, frames
+
+def collect_data():
+    """Capture a new set of data from the device.
+
+    Captures both motion data and camera frames, while the user is moving
+    the device in a prescribed manner.
+
+    Returns:
+        events: Dictionary containing all gyro events and cam timestamps.
+        frames: List of RGB images as numpy arrays.
+    """
+    with its.device.ItsSession() as cam:
+        print "Starting sensor event collection"
+        cam.start_sensor_events()
+
+        # Sleep a few seconds for gyro events to stabilize.
+        time.sleep(5)
+
+        # TODO: Ensure that OIS is disabled; set to DISABLE and wait some time.
+
+        # Capture the frames.
+        props = cam.get_camera_properties()
+        fmt = {"format":"yuv", "width":W, "height":H}
+        s,e,_,_,_ = cam.do_3a(get_results=True)
+        req = its.objects.manual_capture_request(s, e)
+        print "Capturing %dx%d with sens. %d, exp. time %.1fms" % (
+                W, H, s, e*NSEC_TO_MSEC)
+        caps = cam.do_capture([req]*N, fmt)
+
+        # Get the gyro events.
+        print "Reading out sensor events"
+        gyro = cam.get_sensor_events()["gyro"]
+
+        # Combine the events into a single structure.
+        print "Dumping event data"
+        starts = [c["metadata"]["android.sensor.timestamp"] for c in caps]
+        exptimes = [c["metadata"]["android.sensor.exposureTime"] for c in caps]
+        readouts = [c["metadata"]["android.sensor.rollingShutterSkew"]
+                    for c in caps]
+        events = {"gyro": gyro, "cam": zip(starts,exptimes,readouts)}
+        with open("%s_events.txt"%(NAME), "w") as f:
+            f.write(json.dumps(events))
+
+        # Convert the frames to RGB.
+        print "Dumping frames"
+        frames = []
+        for i,c in enumerate(caps):
+            img = its.image.convert_capture_to_rgb_image(c)
+            frames.append(img)
+            its.image.write_image(img, "%s_frame%03d.jpg"%(NAME,i))
+
+        return events, frames
+
+def procrustes_rotation(X, Y):
+    """
+    Procrustes analysis determines a linear transformation (translation,
+    reflection, orthogonal rotation and scaling) of the points in Y to best
+    conform them to the points in matrix X, using the sum of squared errors
+    as the goodness of fit criterion.
+
+    Args:
+        X, Y: Matrices of target and input coordinates.
+
+    Returns:
+        The rotation component of the transformation that maps X to Y.
+    """
+    X0 = (X-X.mean(0)) / numpy.sqrt(((X-X.mean(0))**2.0).sum())
+    Y0 = (Y-Y.mean(0)) / numpy.sqrt(((Y-Y.mean(0))**2.0).sum())
+    U,s,Vt = numpy.linalg.svd(numpy.dot(X0.T, Y0),full_matrices=False)
+    return numpy.dot(Vt.T, U.T)
+
+if __name__ == '__main__':
+    main()
+
diff --git a/apps/CameraITS/tools/run_all_tests.py b/apps/CameraITS/tools/run_all_tests.py
index 2202d5b..f5a53b1 100644
--- a/apps/CameraITS/tools/run_all_tests.py
+++ b/apps/CameraITS/tools/run_all_tests.py
@@ -38,6 +38,8 @@
             "test_ae_precapture_trigger",
             "test_black_white",
             "test_crop_region_raw",
+            "test_ev_compensation_advanced",
+            "test_ev_compensation_basic",
             "test_locked_burst",
             "test_yuv_plus_jpeg"
         ]
diff --git a/apps/CtsVerifier/AndroidManifest.xml b/apps/CtsVerifier/AndroidManifest.xml
index 94c6a0c..98044e4 100644
--- a/apps/CtsVerifier/AndroidManifest.xml
+++ b/apps/CtsVerifier/AndroidManifest.xml
@@ -18,7 +18,7 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
       package="com.android.cts.verifier"
       android:versionCode="5"
-      android:versionName="5.0_r1.9">
+      android:versionName="5.0_r1.91">
 
     <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="21"/>
 
diff --git a/apps/CtsVerifier/res/layout/test_list_footer.xml b/apps/CtsVerifier/res/layout/test_list_footer.xml
new file mode 100644
index 0000000..fdb8e43
--- /dev/null
+++ b/apps/CtsVerifier/res/layout/test_list_footer.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!--
+  ~ Copyright (C) 2014 The Android Open Source Project
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License
+  -->
+<GridLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="horizontal"
+    android:layout_width="match_parent"
+    android:layout_height="wrap_content">
+
+    <Button
+        android:id="@+id/clear"
+        android:text="@string/clear"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" />
+    <Button
+        android:id="@+id/view"
+        android:text="@string/view"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" />
+    <Button
+        android:id="@+id/export"
+        android:text="@string/export"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" />
+</GridLayout>
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java
index 1cc3547..8cfc6df 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/TestListActivity.java
@@ -23,16 +23,23 @@
 import android.view.Menu;
 import android.view.MenuInflater;
 import android.view.MenuItem;
+import android.view.View;
+import android.view.Window;
 import android.widget.Toast;
 
 import java.io.IOException;
 
 /** Top-level {@link ListActivity} for launching tests and managing results. */
-public class TestListActivity extends AbstractTestListActivity {
+public class TestListActivity extends AbstractTestListActivity implements View.OnClickListener {
 
     private static final String TAG = TestListActivity.class.getSimpleName();
 
     @Override
+    public void onClick (View v) {
+        handleMenuItemSelected(v.getId());
+    }
+
+    @Override
     protected void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
 
@@ -41,6 +48,17 @@
         }
 
         setTitle(getString(R.string.title_version, Version.getVersionName(this)));
+
+        if (!getWindow().hasFeature(Window.FEATURE_ACTION_BAR)) {
+            View footer = getLayoutInflater().inflate(R.layout.test_list_footer, null);
+
+            footer.findViewById(R.id.clear).setOnClickListener(this);
+            footer.findViewById(R.id.view).setOnClickListener(this);
+            footer.findViewById(R.id.export).setOnClickListener(this);
+
+            getListView().addFooterView(footer);
+        }
+
         setTestListAdapter(new ManifestTestListAdapter(this, null));
     }
 
@@ -53,22 +71,7 @@
 
     @Override
     public boolean onOptionsItemSelected(MenuItem item) {
-        switch (item.getItemId()) {
-            case R.id.clear:
-                handleClearItemSelected();
-                return true;
-
-            case R.id.view:
-                handleViewItemSelected();
-                return true;
-
-            case R.id.export:
-                handleExportItemSelected();
-                return true;
-
-            default:
-                return super.onOptionsItemSelected(item);
-        }
+        return handleMenuItemSelected(item.getItemId()) ? true : super.onOptionsItemSelected(item);
     }
 
     private void handleClearItemSelected() {
@@ -91,4 +94,23 @@
     private void handleExportItemSelected() {
         new ReportExporter(this, mAdapter).execute();
     }
+
+    private boolean handleMenuItemSelected(int id) {
+        switch (id) {
+            case R.id.clear:
+                handleClearItemSelected();
+                return true;
+
+            case R.id.view:
+                handleViewItemSelected();
+                return true;
+
+            case R.id.export:
+                handleExportItemSelected();
+                return true;
+
+            default:
+                return false;
+        }
+    }
 }
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
index e340c8a..a305cd2 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsService.java
@@ -112,6 +112,7 @@
     public static final String TRIGGER_AE_KEY = "ae";
     public static final String TRIGGER_AF_KEY = "af";
     public static final String VIB_PATTERN_KEY = "pattern";
+    public static final String EVCOMP_KEY = "evComp";
 
     private CameraManager mCameraManager = null;
     private HandlerThread mCameraThread = null;
@@ -802,6 +803,12 @@
             mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
             mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);
 
+            // An EV compensation can be specified as part of AE convergence.
+            int evComp = params.optInt(EVCOMP_KEY, 0);
+            if (evComp != 0) {
+                Logt.i(TAG, String.format("Running 3A with AE exposure compensation value: %d", evComp));
+            }
+
             // By default, AE and AF both get triggered, but the user can optionally override this.
             // Also, AF won't get triggered if the lens is fixed-focus.
             boolean doAE = true;
@@ -845,7 +852,11 @@
                 // at a time, to simplify the logic here.
                 if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
                         System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
-                    throw new ItsException("3A failed to converge (timeout)");
+                    throw new ItsException(
+                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" +
+                            "AE converge state: " + mConvergedAE + ", \n" +
+                            "AF convergence state: " + mConvergedAF + ", \n" +
+                            "AWB convergence state: " + mConvergedAWB + ".");
                 }
                 mInterlock3A.close();
 
@@ -876,6 +887,10 @@
                     req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                     req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);
 
+                    if (evComp != 0) {
+                        req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp);
+                    }
+
                     if (mConvergedAE && mNeedsLockedAE) {
                         req.set(CaptureRequest.CONTROL_AE_LOCK, true);
                     }
diff --git a/hostsidetests/monkey/src/com/android/cts/monkey/MonkeyTest.java b/hostsidetests/monkey/src/com/android/cts/monkey/MonkeyTest.java
index f141d8fa..997f7c6 100644
--- a/hostsidetests/monkey/src/com/android/cts/monkey/MonkeyTest.java
+++ b/hostsidetests/monkey/src/com/android/cts/monkey/MonkeyTest.java
@@ -37,7 +37,8 @@
     }
 
     private void assertIsUserAMonkey(boolean isMonkey) throws DeviceNotAvailableException {
-        String logs = mDevice.executeAdbCommand("logcat", "-d", "MonkeyActivity:I", "*:S");
+        String logs = mDevice.executeAdbCommand(
+                "logcat", "-v", "brief", "-d", "MonkeyActivity:I", "*:S");
         boolean monkeyLogsFound = false;
         Scanner s = new Scanner(logs);
         try {
diff --git a/hostsidetests/sample/src/android/sample/cts/SampleHostTest.java b/hostsidetests/sample/src/android/sample/cts/SampleHostTest.java
index 3cc4aa9..ab7e0b0 100644
--- a/hostsidetests/sample/src/android/sample/cts/SampleHostTest.java
+++ b/hostsidetests/sample/src/android/sample/cts/SampleHostTest.java
@@ -123,7 +123,7 @@
         // Start the APK and wait for it to complete.
         mDevice.executeShellCommand(START_COMMAND);
         // Dump logcat.
-        String logs = mDevice.executeAdbCommand("logcat", "-d", CLASS + ":I", "*:S");
+        String logs = mDevice.executeAdbCommand("logcat", "-v", "brief", "-d", CLASS + ":I", "*:S");
         // Search for string.
         String testString = "";
         Scanner in = new Scanner(logs);
diff --git a/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java b/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java
index 90a0c72..da94b15 100644
--- a/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java
+++ b/hostsidetests/theme/src/android/theme/cts/ThemeHostTest.java
@@ -322,7 +322,8 @@
         boolean waiting = true;
         while (waiting) {
             // Dump logcat.
-            final String logs = mDevice.executeAdbCommand("logcat", "-d", CLASS + ":I", "*:S");
+            final String logs = mDevice.executeAdbCommand(
+                    "logcat", "-v", "brief", "-d", CLASS + ":I", "*:S");
             // Search for string.
             final Scanner in = new Scanner(logs);
             while (in.hasNextLine()) {
diff --git a/hostsidetests/usb/src/com/android/cts/usb/TestUsbTest.java b/hostsidetests/usb/src/com/android/cts/usb/TestUsbTest.java
index 4736e51..3af52c0 100644
--- a/hostsidetests/usb/src/com/android/cts/usb/TestUsbTest.java
+++ b/hostsidetests/usb/src/com/android/cts/usb/TestUsbTest.java
@@ -40,13 +40,11 @@
  */
 public class TestUsbTest extends DeviceTestCase implements IAbiReceiver, IBuildReceiver {
 
-    private static final String LOG_TAG = "TestUsbTest";
     private static final String CTS_RUNNER = "android.support.test.runner.AndroidJUnitRunner";
     private static final String PACKAGE_NAME = "com.android.cts.usb.serialtest";
     private static final String APK_NAME="CtsUsbSerialTestApp.apk";
     private ITestDevice mDevice;
     private IAbi mAbi;
-    private String mAbiBitness;
     private CtsBuildHelper mBuild;
 
     @Override
@@ -118,7 +116,8 @@
         if (runResult.isRunFailure()) {
             fail(runResult.getRunFailureMessage());
         }
-        String logs = mDevice.executeAdbCommand("logcat", "-d", "CtsUsbSerialTest:W", "*:S");
+        String logs = mDevice.executeAdbCommand(
+                "logcat", "-v", "brief", "-d", "CtsUsbSerialTest:W", "*:S");
         pattern = Pattern.compile("^.*CtsUsbSerialTest\\(.*\\):\\s+([a-zA-Z0-9]{6,20})",
                 Pattern.MULTILINE);
         matcher = pattern.matcher(logs);
diff --git a/tests/core/runner/src/com/android/cts/runner/CtsTestRunListener.java b/tests/core/runner/src/com/android/cts/runner/CtsTestRunListener.java
index 5196df1..5f67475 100644
--- a/tests/core/runner/src/com/android/cts/runner/CtsTestRunListener.java
+++ b/tests/core/runner/src/com/android/cts/runner/CtsTestRunListener.java
@@ -35,6 +35,7 @@
 import java.net.CookieHandler;
 import java.net.ResponseCache;
 import java.util.Locale;
+import java.util.Properties;
 import java.util.TimeZone;
 
 import javax.net.ssl.HostnameVerifier;
@@ -57,7 +58,7 @@
 
     @Override
     public void testRunStarted(Description description) throws Exception {
-        mEnvironment = new TestEnvironment();
+        mEnvironment = new TestEnvironment(getInstrumentation().getContext());
 
         // We might want to move this to /sdcard, if is is mounted/writable.
         File cacheDir = getInstrumentation().getTargetContext().getCacheDir();
@@ -149,21 +150,28 @@
     static class TestEnvironment {
         private final Locale mDefaultLocale;
         private final TimeZone mDefaultTimeZone;
-        private final String mJavaIoTmpDir;
         private final HostnameVerifier mHostnameVerifier;
         private final SSLSocketFactory mSslSocketFactory;
+        private final Properties mProperties = new Properties();
 
-        TestEnvironment() {
+        TestEnvironment(Context context) {
             mDefaultLocale = Locale.getDefault();
             mDefaultTimeZone = TimeZone.getDefault();
-            mJavaIoTmpDir = System.getProperty("java.io.tmpdir");
             mHostnameVerifier = HttpsURLConnection.getDefaultHostnameVerifier();
             mSslSocketFactory = HttpsURLConnection.getDefaultSSLSocketFactory();
+
+            mProperties.setProperty("java.io.tmpdir", System.getProperty("java.io.tmpdir"));
+            // Per the CDD, multicast support is only mandated for devices
+            // that support WiFi.
+            PackageManager pm = context.getPackageManager();
+            mProperties.setProperty("android.cts.device.multicast",
+                    Boolean.toString(pm.hasSystemFeature(PackageManager.FEATURE_WIFI)));
+
         }
 
         void reset() {
             System.setProperties(null);
-            System.setProperty("java.io.tmpdir", mJavaIoTmpDir);
+            System.setProperties(mProperties);
             Locale.setDefault(mDefaultLocale);
             TimeZone.setDefault(mDefaultTimeZone);
             Authenticator.setDefault(null);
diff --git a/tests/tests/media/src/android/media/cts/CamcorderProfileTest.java b/tests/tests/media/src/android/media/cts/CamcorderProfileTest.java
index 8130a9a..7dfb1f6 100644
--- a/tests/tests/media/src/android/media/cts/CamcorderProfileTest.java
+++ b/tests/tests/media/src/android/media/cts/CamcorderProfileTest.java
@@ -25,12 +25,47 @@
 import android.test.AndroidTestCase;
 import android.util.Log;
 
+import java.util.Arrays;
 import java.util.List;
 
 public class CamcorderProfileTest extends AndroidTestCase {
 
     private static final String TAG = "CamcorderProfileTest";
     private static final int MIN_HIGH_SPEED_FPS = 100;
+    private static final Integer[] ALL_SUPPORTED_QUALITIES = {
+        CamcorderProfile.QUALITY_LOW,
+        CamcorderProfile.QUALITY_HIGH,
+        CamcorderProfile.QUALITY_QCIF,
+        CamcorderProfile.QUALITY_CIF,
+        CamcorderProfile.QUALITY_480P,
+        CamcorderProfile.QUALITY_720P,
+        CamcorderProfile.QUALITY_1080P,
+        CamcorderProfile.QUALITY_QVGA,
+        CamcorderProfile.QUALITY_2160P,
+        CamcorderProfile.QUALITY_TIME_LAPSE_LOW,
+        CamcorderProfile.QUALITY_TIME_LAPSE_HIGH,
+        CamcorderProfile.QUALITY_TIME_LAPSE_QCIF,
+        CamcorderProfile.QUALITY_TIME_LAPSE_CIF,
+        CamcorderProfile.QUALITY_TIME_LAPSE_480P,
+        CamcorderProfile.QUALITY_TIME_LAPSE_720P,
+        CamcorderProfile.QUALITY_TIME_LAPSE_1080P,
+        CamcorderProfile.QUALITY_TIME_LAPSE_QVGA,
+        CamcorderProfile.QUALITY_TIME_LAPSE_2160P,
+        CamcorderProfile.QUALITY_HIGH_SPEED_LOW,
+        CamcorderProfile.QUALITY_HIGH_SPEED_HIGH,
+        CamcorderProfile.QUALITY_HIGH_SPEED_480P,
+        CamcorderProfile.QUALITY_HIGH_SPEED_720P,
+        CamcorderProfile.QUALITY_HIGH_SPEED_1080P,
+        CamcorderProfile.QUALITY_HIGH_SPEED_2160P
+    };
+    private static final int LAST_QUALITY = CamcorderProfile.QUALITY_2160P;
+    private static final int LAST_TIMELAPSE_QUALITY = CamcorderProfile.QUALITY_TIME_LAPSE_1080P;
+    private static final int LAST_HIGH_SPEED_QUALITY = CamcorderProfile.QUALITY_HIGH_SPEED_2160P;
+    private static final Integer[] UNKNOWN_QUALITIES = {
+        LAST_QUALITY + 1, // Unknown normal profile quality
+        LAST_TIMELAPSE_QUALITY + 1, // Unknown timelapse profile quality
+        LAST_HIGH_SPEED_QUALITY + 1 // Unknown high speed timelapse profile quality
+    };
 
     // Uses get without id if cameraId == -1 and get with id otherwise.
     private CamcorderProfile getWithOptionalId(int quality, int cameraId) {
@@ -59,27 +94,7 @@
             profile.audioSampleRate,
             profile.audioChannels));
         assertTrue(profile.duration > 0);
-        assertTrue(profile.quality == CamcorderProfile.QUALITY_LOW ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH ||
-                   profile.quality == CamcorderProfile.QUALITY_QCIF ||
-                   profile.quality == CamcorderProfile.QUALITY_CIF ||
-                   profile.quality == CamcorderProfile.QUALITY_480P ||
-                   profile.quality == CamcorderProfile.QUALITY_720P ||
-                   profile.quality == CamcorderProfile.QUALITY_1080P ||
-                   profile.quality == CamcorderProfile.QUALITY_2160P ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_LOW ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_HIGH ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_QCIF ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_CIF ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_480P ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_720P ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_1080P ||
-                   profile.quality == CamcorderProfile.QUALITY_TIME_LAPSE_2160P ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_LOW ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_HIGH ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_480P ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_720P ||
-                   profile.quality == CamcorderProfile.QUALITY_HIGH_SPEED_1080P);
+        assertTrue(Arrays.asList(ALL_SUPPORTED_QUALITIES).contains(profile.quality));
         assertTrue(profile.videoBitRate > 0);
         assertTrue(profile.videoFrameRate > 0);
         assertTrue(profile.videoFrameWidth > 0);
@@ -233,19 +248,30 @@
 
         final List<Size> videoSizes = getSupportedVideoSizes(cameraId);
 
-        CamcorderProfile lowProfile =
-            getWithOptionalId(CamcorderProfile.QUALITY_LOW, cameraId);
-        CamcorderProfile highProfile =
-            getWithOptionalId(CamcorderProfile.QUALITY_HIGH, cameraId);
-        checkProfile(lowProfile, videoSizes);
-        checkProfile(highProfile, videoSizes);
+        /**
+         * Check all possible supported profiles: get profile should work, and the profile
+         * should be sane. Note that, timelapse and high speed video sizes may not be listed
+         * as supported video sizes from camera, skip the size check.
+         */
+        for (Integer quality : ALL_SUPPORTED_QUALITIES) {
+            if (CamcorderProfile.hasProfile(cameraId, quality) || isProfileMandatory(quality)) {
+                List<Size> videoSizesToCheck = null;
+                if (quality >= CamcorderProfile.QUALITY_LOW &&
+                                quality <= CamcorderProfile.QUALITY_2160P) {
+                    videoSizesToCheck = videoSizes;
+                }
+                CamcorderProfile profile = getWithOptionalId(quality, cameraId);
+                checkProfile(profile, videoSizesToCheck);
+            }
+        }
 
-        CamcorderProfile lowTimeLapseProfile =
-            getWithOptionalId(CamcorderProfile.QUALITY_TIME_LAPSE_LOW, cameraId);
-        CamcorderProfile highTimeLapseProfile =
-            getWithOptionalId(CamcorderProfile.QUALITY_TIME_LAPSE_HIGH, cameraId);
-        checkProfile(lowTimeLapseProfile, null);
-        checkProfile(highTimeLapseProfile, null);
+        /**
+         * Check unknown profiles: hasProfile() should return false.
+         */
+        for (Integer quality : UNKNOWN_QUALITIES) {
+            assertFalse("Unknown profile quality " + quality + " shouldn't be supported by camera "
+                    + cameraId, CamcorderProfile.hasProfile(cameraId, quality));
+        }
 
         // High speed low and high profile are optional,
         // but they should be both present or missing.
@@ -288,8 +314,17 @@
 
         int[] specificHighSpeedProfileQualities = {CamcorderProfile.QUALITY_HIGH_SPEED_480P,
                                                    CamcorderProfile.QUALITY_HIGH_SPEED_720P,
-                                                   CamcorderProfile.QUALITY_HIGH_SPEED_1080P};
+                                                   CamcorderProfile.QUALITY_HIGH_SPEED_1080P,
+                                                   CamcorderProfile.QUALITY_HIGH_SPEED_2160P};
 
+        CamcorderProfile lowProfile =
+                getWithOptionalId(CamcorderProfile.QUALITY_LOW, cameraId);
+        CamcorderProfile highProfile =
+                getWithOptionalId(CamcorderProfile.QUALITY_HIGH, cameraId);
+        CamcorderProfile lowTimeLapseProfile =
+                getWithOptionalId(CamcorderProfile.QUALITY_TIME_LAPSE_LOW, cameraId);
+        CamcorderProfile highTimeLapseProfile =
+                getWithOptionalId(CamcorderProfile.QUALITY_TIME_LAPSE_HIGH, cameraId);
         checkSpecificProfiles(cameraId, lowProfile, highProfile,
                 specificProfileQualities, videoSizes);
         checkSpecificProfiles(cameraId, lowTimeLapseProfile, highTimeLapseProfile,
@@ -342,4 +377,11 @@
         Log.e(TAG, "Size (" + width + "x" + height + ") is not supported");
         return false;
     }
+
+    private boolean isProfileMandatory(int quality) {
+        return (quality == CamcorderProfile.QUALITY_LOW) ||
+                (quality == CamcorderProfile.QUALITY_HIGH) ||
+                (quality == CamcorderProfile.QUALITY_TIME_LAPSE_LOW) ||
+                (quality == CamcorderProfile.QUALITY_TIME_LAPSE_HIGH);
+    }
 }
diff --git a/tests/tests/media/src/android/media/cts/EncodeVirtualDisplayWithCompositionTest.java b/tests/tests/media/src/android/media/cts/EncodeVirtualDisplayWithCompositionTest.java
index 7b21997..9c99c2d 100644
--- a/tests/tests/media/src/android/media/cts/EncodeVirtualDisplayWithCompositionTest.java
+++ b/tests/tests/media/src/android/media/cts/EncodeVirtualDisplayWithCompositionTest.java
@@ -140,7 +140,8 @@
         Log.i(TAG, "testRendering800x480Locally");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRendering800x480Locally(): codec not supported");
+            return;
         }
         if (maxRes.first >= 800 && maxRes.second >= 480) {
             runTestRenderingInSeparateThread(800, 480, false, false);
@@ -153,7 +154,8 @@
         Log.i(TAG, "testRenderingMaxResolutionLocally");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRenderingMaxResolutionLocally(): codec not supported");
+            return;
         }
         Log.w(TAG, "Trying resolution w:" + maxRes.first + " h:" + maxRes.second);
         runTestRenderingInSeparateThread(maxRes.first, maxRes.second, false, false);
@@ -163,7 +165,8 @@
         Log.i(TAG, "testRendering800x480Remotely");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRendering800x480Remotely(): codec not supported");
+            return;
         }
         if (maxRes.first >= 800 && maxRes.second >= 480) {
             runTestRenderingInSeparateThread(800, 480, true, false);
@@ -176,7 +179,8 @@
         Log.i(TAG, "testRenderingMaxResolutionRemotely");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRenderingMaxResolutionRemotely(): codec not supported");
+            return;
         }
         Log.w(TAG, "Trying resolution w:" + maxRes.first + " h:" + maxRes.second);
         runTestRenderingInSeparateThread(maxRes.first, maxRes.second, true, false);
@@ -186,7 +190,8 @@
         Log.i(TAG, "testRendering800x480RemotelyWith3Windows");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRendering800x480RemotelyWith3Windows(): codec not supported");
+            return;
         }
         if (maxRes.first >= 800 && maxRes.second >= 480) {
             runTestRenderingInSeparateThread(800, 480, true, true);
@@ -199,7 +204,8 @@
         Log.i(TAG, "testRendering800x480LocallyWith3Windows");
         Pair<Integer, Integer> maxRes = checkMaxConcurrentEncodingDecodingResolution();
         if (maxRes == null) {
-            fail("codec not supported");
+            Log.i(TAG, "SKIPPING testRendering800x480LocallyWith3Windows(): codec not supported");
+            return;
         }
         if (maxRes.first >= 800 && maxRes.second >= 480) {
             runTestRenderingInSeparateThread(800, 480, false, true);
diff --git a/tests/tests/media/src/android/media/cts/ImageReaderDecoderTest.java b/tests/tests/media/src/android/media/cts/ImageReaderDecoderTest.java
index d620995..9528db9 100644
--- a/tests/tests/media/src/android/media/cts/ImageReaderDecoderTest.java
+++ b/tests/tests/media/src/android/media/cts/ImageReaderDecoderTest.java
@@ -100,6 +100,10 @@
      * to be supported by hw decoder.
      */
     public void testHwAVCDecode360pForFlexibleYuv() throws Exception {
+        if (!MediaPlayerTestBase.hasH264(false)) {
+            Log.i(TAG, "SKIPPING testHwAVCDecode360pForFlexibleYuv(): no codec found.");
+            return;
+        }
         try {
             int format = ImageFormat.YUV_420_888;
             videoDecodeToSurface(
@@ -115,6 +119,10 @@
      * to be supported by sw decoder.
      */
     public void testSwAVCDecode360pForFlexibleYuv() throws Exception {
+        if (!MediaPlayerTestBase.hasH264(false)) {
+            Log.i(TAG, "SKIPPING testSwAVCDecode360pForFlexibleYuv(): no codec found.");
+            return;
+        }
         try {
             int format = ImageFormat.YUV_420_888;
             videoDecodeToSurface(
diff --git a/tests/tests/media/src/android/media/cts/MediaPlayerFlakyNetworkTest.java b/tests/tests/media/src/android/media/cts/MediaPlayerFlakyNetworkTest.java
index 8063cbb..c5cd04e 100644
--- a/tests/tests/media/src/android/media/cts/MediaPlayerFlakyNetworkTest.java
+++ b/tests/tests/media/src/android/media/cts/MediaPlayerFlakyNetworkTest.java
@@ -92,7 +92,10 @@
         doPlayStreams(6, 0.00002f);
     }
 
-   private void doPlayStreams(int seed, float probability) throws Throwable {
+    private void doPlayStreams(int seed, float probability) throws Throwable {
+        if (!hasH264(false)) {
+            return;
+        }
         Random random = new Random(seed);
         createHttpServer(seed, probability);
         for (int i = 0; i < 10; i++) {
diff --git a/tests/tests/permission/src/android/permission/cts/NoReadLogsPermissionTest.java b/tests/tests/permission/src/android/permission/cts/NoReadLogsPermissionTest.java
index 8979a07..7b3799d 100644
--- a/tests/tests/permission/src/android/permission/cts/NoReadLogsPermissionTest.java
+++ b/tests/tests/permission/src/android/permission/cts/NoReadLogsPermissionTest.java
@@ -48,7 +48,7 @@
         BufferedReader reader = null;
         try {
             logcatProc = Runtime.getRuntime().exec(new String[]
-                    {"logcat", "-d", "ActivityManager:* *:S" });
+                    {"logcat", "-v", "brief", "-d", "ActivityManager:* *:S" });
 
             reader = new BufferedReader(new InputStreamReader(logcatProc.getInputStream()));
 
diff --git a/tests/tests/print/src/android/print/cts/BasePrintTest.java b/tests/tests/print/src/android/print/cts/BasePrintTest.java
index 1493bc9..c73bb64 100644
--- a/tests/tests/print/src/android/print/cts/BasePrintTest.java
+++ b/tests/tests/print/src/android/print/cts/BasePrintTest.java
@@ -25,6 +25,7 @@
 import static org.mockito.Mockito.when;
 
 import android.content.Context;
+import android.content.pm.PackageManager;
 import android.content.res.Configuration;
 import android.content.res.Resources;
 import android.graphics.pdf.PdfDocument;
@@ -458,4 +459,8 @@
             }
         }
     }
+
+    protected boolean supportsPrinting() {
+        return getActivity().getPackageManager().hasSystemFeature(PackageManager.FEATURE_PRINTING);
+    }
 }
diff --git a/tests/tests/print/src/android/print/cts/PageRangeAdjustmentTest.java b/tests/tests/print/src/android/print/cts/PageRangeAdjustmentTest.java
index 4952cbd..b9fd50a 100644
--- a/tests/tests/print/src/android/print/cts/PageRangeAdjustmentTest.java
+++ b/tests/tests/print/src/android/print/cts/PageRangeAdjustmentTest.java
@@ -62,6 +62,10 @@
     private static final String FIRST_PRINTER = "First printer";
 
     public void testAllPagesWantedAndAllPagesWritten() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
@@ -161,6 +165,10 @@
     }
 
     public void testSomePagesWantedAndAllPagesWritten() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
@@ -269,6 +277,10 @@
     }
 
     public void testSomePagesWantedAndSomeMorePagesWritten() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
@@ -393,6 +405,10 @@
     }
 
     public void testSomePagesWantedAndNotWritten() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
@@ -481,6 +497,10 @@
     }
 
     public void testWantedPagesAlreadyWrittenForPreview() throws Exception {
+        if (!supportsPrinting()) {
+            return;
+        }
+
         // Create a callback for the target print service.
         PrintServiceCallbacks firstServiceCallbacks = createMockPrintServiceCallbacks(
             new Answer<PrinterDiscoverySessionCallbacks>() {
diff --git a/tools/tradefed-host/src/com/android/cts/tradefed/build/CtsBuildProvider.java b/tools/tradefed-host/src/com/android/cts/tradefed/build/CtsBuildProvider.java
index 62d7f47..2ee649d 100644
--- a/tools/tradefed-host/src/com/android/cts/tradefed/build/CtsBuildProvider.java
+++ b/tools/tradefed-host/src/com/android/cts/tradefed/build/CtsBuildProvider.java
@@ -31,7 +31,7 @@
     @Option(name="cts-install-path", description="the path to the cts installation to use")
     private String mCtsRootDirPath = System.getProperty("CTS_ROOT");
 
-    public static final String CTS_BUILD_VERSION = "5.0_r1.9";
+    public static final String CTS_BUILD_VERSION = "5.0_r1.91";
 
     /**
      * {@inheritDoc}