Merge "Revert "Refactor neverallow tests to be parameterized""
diff --git a/apps/CameraITS/tools/run_all_tests.py b/apps/CameraITS/tools/run_all_tests.py
index ed3d50d..43031fe 100755
--- a/apps/CameraITS/tools/run_all_tests.py
+++ b/apps/CameraITS/tools/run_all_tests.py
@@ -401,7 +401,7 @@
   """
   cmd = (f'adb -s {device_id} shell cmd device_state state')
   result = subprocess.getoutput(cmd)
-  if 'CLOSED' in result:
+  if 'CLOSE' in result:
     return True
   return False
 
@@ -539,8 +539,11 @@
       device_state = 'folded' if device_folded else 'opened'
 
     testing_folded_front_camera = (testing_foldable_device and
-                                   device_folded and
                                    _FRONT_CAMERA_ID in camera_id)
+    if testing_folded_front_camera:
+      if not device_folded:
+        raise AssertionError(
+            'Device should be folded while testing folded scene.')
 
     # Raise an assertion error if there is any camera unavailable in
     # current device state. Usually scenes with suffix 'folded' will
diff --git a/apps/CameraITS/utils/camera_properties_utils.py b/apps/CameraITS/utils/camera_properties_utils.py
index 44aa510..89dbd37 100644
--- a/apps/CameraITS/utils/camera_properties_utils.py
+++ b/apps/CameraITS/utils/camera_properties_utils.py
@@ -567,7 +567,7 @@
     Return:
         Boolean.
     """
-    return 'android.edge.availableToneMapModes' in props and mode in props[
+    return 'android.tonemap.availableToneMapModes' in props and mode in props[
         'android.tonemap.availableToneMapModes']
 
 
diff --git a/apps/CtsVerifier/res/layout/ca_install_via_intent.xml b/apps/CtsVerifier/res/layout/ca_install_via_intent.xml
index 4b529d1..38d75b7 100644
--- a/apps/CtsVerifier/res/layout/ca_install_via_intent.xml
+++ b/apps/CtsVerifier/res/layout/ca_install_via_intent.xml
@@ -13,7 +13,11 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+<ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:orientation="vertical">
+ <LinearLayout
         android:orientation="vertical"
         android:layout_width="match_parent"
         android:layout_height="match_parent">
@@ -47,5 +51,5 @@
 
     <include layout="@layout/pass_fail_buttons" />
 
-</LinearLayout>
-
+ </LinearLayout>
+</ScrollView>
diff --git a/apps/CtsVerifier/res/layout/clipboard_preview.xml b/apps/CtsVerifier/res/layout/clipboard_preview.xml
index efec118..ff9d4af 100644
--- a/apps/CtsVerifier/res/layout/clipboard_preview.xml
+++ b/apps/CtsVerifier/res/layout/clipboard_preview.xml
@@ -13,7 +13,11 @@
      See the License for the specific language governing permissions and
      limitations under the License.
 -->
-<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+<ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:orientation="vertical">
+ <LinearLayout
     android:layout_width="match_parent"
     android:layout_height="match_parent"
     android:orientation="vertical">
@@ -37,4 +41,5 @@
         android:layout_height="wrap_content"
         android:visibility="invisible"
         layout="@layout/pass_fail_buttons"/>
-</LinearLayout>
+ </LinearLayout>
+</ScrollView>
diff --git a/apps/CtsVerifier/res/layout/pass_fail_set_password_complexity.xml b/apps/CtsVerifier/res/layout/pass_fail_set_password_complexity.xml
index e8cb0ff..b8b2825 100644
--- a/apps/CtsVerifier/res/layout/pass_fail_set_password_complexity.xml
+++ b/apps/CtsVerifier/res/layout/pass_fail_set_password_complexity.xml
@@ -17,13 +17,19 @@
 <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
                 android:layout_width="match_parent"
                 android:layout_height="match_parent">
+    <ScrollView
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:fillViewport="true"
+        android:orientation="vertical">
     <LinearLayout android:layout_width="match_parent"
                   android:layout_height="match_parent"
                   android:layout_alignParentTop="true"
                   android:layout_alignParentStart="true"
                   android:orientation="vertical"
                   android:divider="@android:color/white"
-                  android:showDividers="middle">
+                  android:showDividers="middle"
+                  android:paddingBottom="30dp">
 
         <LinearLayout android:layout_height="wrap_content"
                       android:layout_width="wrap_content"
@@ -83,6 +89,7 @@
         </LinearLayout>
 
     </LinearLayout>
+    </ScrollView>
 
     <include android:layout_width="match_parent"
              android:layout_height="wrap_content"
diff --git a/apps/CtsVerifier/res/layout/widget_layout.xml b/apps/CtsVerifier/res/layout/widget_layout.xml
index b0cce17..9f380c6 100644
--- a/apps/CtsVerifier/res/layout/widget_layout.xml
+++ b/apps/CtsVerifier/res/layout/widget_layout.xml
@@ -41,7 +41,7 @@
             android:gravity="top|left"
             android:layout_marginBottom="10dp"
             android:fontFamily="sans-serif"
-            android:textSize="20sp"
+            android:textSize="18sp"
             android:text="@string/widget_name"
             android:freezesText="true"/>
 
@@ -51,7 +51,7 @@
             android:layout_height="wrap_content"
             android:layout_marginBottom="20dp"
             android:fontFamily="sans-serif-light"
-            android:textSize="16sp"
+            android:textSize="15sp"
             android:freezesText="true"/>
 
         <TextView
@@ -61,7 +61,7 @@
             android:layout_marginBottom="18dp"
             android:layout_gravity="center_horizontal"
             android:fontFamily="sans-serif-light"
-            android:textSize="16sp"/>
+            android:textSize="15sp"/>
 
         <ListView
             android:id="@+id/list"
@@ -84,12 +84,14 @@
                 android:layout_width="wrap_content"
                 android:layout_height="wrap_content"
                 android:minWidth="100dp"
+                android:textSize="15sp"
                 android:text="@string/widget_fail" />
             <Button
                 android:id="@+id/pass"
                 android:layout_width="wrap_content"
                 android:layout_height="wrap_content"
                 android:minWidth="100dp"
+                android:textSize="15sp"
                 android:text="@string/widget_pass" />
         </LinearLayout>
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioDescriptorActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioDescriptorActivity.java
index 2aed7f9..1a94bb6 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioDescriptorActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioDescriptorActivity.java
@@ -127,6 +127,11 @@
     }
 
     @Override
+    public boolean requiresReportLog() {
+        return true;
+    }
+
+    @Override
     public void recordTestResults() {
         CtsVerifierReportLog reportLog = getReportLog();
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioMicrophoneMuteToggleActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioMicrophoneMuteToggleActivity.java
index 87b5f12..dcde897 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioMicrophoneMuteToggleActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioMicrophoneMuteToggleActivity.java
@@ -147,6 +147,11 @@
     }
 
     @Override
+    public boolean requiresReportLog() {
+        return true;
+    }
+
+    @Override
     public void recordTestResults() {
         CtsVerifierReportLog reportLog = getReportLog();
 
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsTestActivity.java
index ca319df..fbfd6f1 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/its/ItsTestActivity.java
@@ -607,8 +607,16 @@
         return "Camera: " + cam + ", " + scene;
     }
 
+    // CtsVerifier has a "Folded" toggle that selectively surfaces some tests.
+    // To separate the tests in folded and unfolded states, CtsVerifier adds a [folded]
+    // suffix to the test id in its internal database depending on the state of the "Folded"
+    // toggle button. However, CameraITS has tests that it needs to persist across both folded
+    // and unfolded states.To get the test results to persist, we need CtsVerifier to store and
+    // look up the same test id regardless of the toggle button state.
+    // TODO(b/282804139): Update CTS tests to allow activities to write tests that persist
+    // across the states
     protected String testId(String cam, String scene) {
-        return "Camera_ITS_" + cam + "_" + scene;
+        return "Camera_ITS_" + cam + "_" + scene + "[folded]";
     }
 
     protected boolean isFoldableDevice() {
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/features/FeatureUtil.java b/apps/CtsVerifier/src/com/android/cts/verifier/features/FeatureUtil.java
index a7b58fa..9f6ea58 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/features/FeatureUtil.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/features/FeatureUtil.java
@@ -90,10 +90,52 @@
      * Checks whether the device supports file transfer.
      */
     public static boolean isUsbFileTransferSupported(Context context) {
-        return !isWatchOrAutomotive(context);
+        return !isWatchOrAutomotive(context) && !isTelevision(context);
     }
 
     /**
+     * Checks if VPN Config is supported.
+     */
+    public static boolean isVpnConfigSupported(Context context) {
+        return !isWatch(context);
+    }
+
+    /**
+     * Checks if Disabling Keyguard is supported.
+     */
+    public static boolean isDisableKeyguardSupported(Context context) {
+        return !isWatch(context);
+    }
+
+    /**
+     * Checks if Lock Task is supported.
+     */
+    public static boolean isLockTaskSupported(Context context) {
+        return !isWatch(context) && !isTelevision(context);
+    }
+
+    /**
+     * Checks if Status Bar is supported.
+     */
+    public static boolean isStatusBarSupported(Context context) {
+        return !isWatch(context) && !isTelevision(context);
+    }
+
+    /**
+     * Checks if Data Roaming is supported.
+     */
+    public static boolean isDataRoamingSupported(Context context) {
+        PackageManager pm = context.getPackageManager();
+        return pm.hasSystemFeature(PackageManager.FEATURE_TELEPHONY) && !isWatch(context);
+    }
+
+    /**
+     * Checks is Swipe To Unlock is supported.
+     */
+    public static boolean isSwipeToUnlockSupported(Context context) {
+        return !isAutomotive(context);
+    }
+    /**
      * Checks whether the device is watch .
      */
     public static boolean isWatch(Context context) {
@@ -119,6 +161,14 @@
     }
 
     /**
+     * Checks whether the device is a TV
+     */
+    public static boolean isTelevision(Context context) {
+        PackageManager pm = context.getPackageManager();
+        return pm.hasSystemFeature(PackageManager.FEATURE_LEANBACK);
+    }
+
+    /**
      * Checks whether the device supports managed secondary users.
      */
     public static boolean supportManagedSecondaryUsers(Context context) {
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/DeviceOwnerPositiveTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/DeviceOwnerPositiveTestActivity.java
index 74ea3f0..90f432d 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/DeviceOwnerPositiveTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/managedprovisioning/DeviceOwnerPositiveTestActivity.java
@@ -355,9 +355,7 @@
         }
 
         // DISALLOW_DATA_ROAMING
-        // TODO(b/189282625): replace FEATURE_WATCH with a more specific feature
-        if (!packageManager.hasSystemFeature(PackageManager.FEATURE_WATCH)
-                && packageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY)) {
+        if (FeatureUtil.isDataRoamingSupported(this)) {
             adapter.add(createInteractiveTestItem(this, DISALLOW_DATA_ROAMING_ID,
                     R.string.device_owner_disallow_data_roaming,
                     R.string.device_owner_disallow_data_roaming_info,
@@ -413,7 +411,7 @@
         }
 
         // DISALLOW_USB_FILE_TRANSFER
-        if (FeatureUtil.isUsbFileTransferSupported(this) && !Utils.isTV(this)) {
+        if (FeatureUtil.isUsbFileTransferSupported(this)) {
             adapter.add(createInteractiveTestItem(this, DISALLOW_USB_FILE_TRANSFER_ID,
                     R.string.device_owner_disallow_usb_file_transfer_test,
                     R.string.device_owner_disallow_usb_file_transfer_test_info,
@@ -430,7 +428,7 @@
         }
 
         // DISABLE_STATUS_BAR_TEST
-        if (isStatusBarEnabled()) {
+        if (FeatureUtil.isStatusBarSupported(this)) {
             adapter.add(createInteractiveTestItem(this, DISABLE_STATUS_BAR_TEST_ID,
                     R.string.device_owner_disable_statusbar_test,
                     R.string.device_owner_disable_statusbar_test_info,
@@ -450,9 +448,8 @@
 
         // Without PIN/Password watches don't have any lockscreen, so this policy isn't applicable
         // setKeyguardDisabled
-        if (FeatureUtil.isKeyguardShownWhenUserDoesntHaveCredentials(this) &&
-                Utils.isLockscreenSupported(this) &&
-                !packageManager.hasSystemFeature(PackageManager.FEATURE_WATCH)) {
+        if (FeatureUtil.isKeyguardShownWhenUserDoesntHaveCredentials(this)
+                && Utils.isLockscreenSupported(this)) {
             adapter.add(createInteractiveTestItem(this, DISABLE_KEYGUARD_TEST_ID,
                     R.string.device_owner_disable_keyguard_test,
                     R.string.device_owner_disable_keyguard_test_info,
@@ -471,8 +468,7 @@
         }
 
         // setLockTaskFeatures
-        // TODO(b/189282625): replace FEATURE_WATCH with a more specific feature
-        if (!packageManager.hasSystemFeature(PackageManager.FEATURE_WATCH) && !Utils.isTV(this)) {
+        if (FeatureUtil.isLockTaskSupported(this)) {
             final Intent lockTaskUiTestIntent = new Intent(this, LockTaskUiTestActivity.class);
             lockTaskUiTestIntent.putExtra(LockTaskUiTestActivity.EXTRA_TEST_ID,
                     LOCK_TASK_UI_TEST_ID);
@@ -622,7 +618,8 @@
                                 createDisableNetworkLoggingIntent())}));
 
         // Customize lock screen message
-        if (isSwipeToUnlockSupported() && Utils.isLockscreenSupported(this)) {
+        if (FeatureUtil.isSwipeToUnlockSupported(this)
+                && Utils.isLockscreenSupported(this)) {
             adapter.add(TestListItem.newTest(this,
                     R.string.device_owner_customize_lockscreen_message,
                     LockscreenMessageTestActivity.class.getName(),
@@ -787,16 +784,6 @@
                 .putExtra(CommandReceiverActivity.EXTRA_VALUE, level);
     }
 
-    private boolean isStatusBarEnabled() {
-        // Watches don't support the status bar so this is an ok proxy, but this is not the most
-        // general test for that. TODO: add a test API to do a real check for status bar support.
-        return !getPackageManager().hasSystemFeature(PackageManager.FEATURE_WATCH)
-                && !isTelevision();
-    }
-
-    private boolean isSwipeToUnlockSupported() {
-        return !isAutomotive();
-    }
 
     private boolean isAutomotive() {
         return FeatureUtil.isAutomotive(this);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/usb/OWNERS b/apps/CtsVerifier/src/com/android/cts/verifier/usb/OWNERS
index 568938c..1015e40 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/usb/OWNERS
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/usb/OWNERS
@@ -1 +1,4 @@
 # Bug component: 175220
+aprasath@google.com
+sarup@google.com
+kumarashishg@google.com
\ No newline at end of file
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/usb/accessory/UsbAccessoryTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/usb/accessory/UsbAccessoryTestActivity.java
index 512162c..d8560fa 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/usb/accessory/UsbAccessoryTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/usb/accessory/UsbAccessoryTestActivity.java
@@ -108,6 +108,11 @@
     }
 
     @Override
+    public boolean requiresReportLog() {
+        return true;
+    }
+
+    @Override
     public void onAttached(UsbAccessory accessory) {
         mStatus.setText(R.string.usb_accessory_test_step2);
         mProgress.setVisibility(View.VISIBLE);
diff --git a/build/device_info_package.mk b/build/device_info_package.mk
index cfd623d..45796e7 100644
--- a/build/device_info_package.mk
+++ b/build/device_info_package.mk
@@ -37,6 +37,7 @@
   $(DEVICE_INFO_PACKAGE).GraphicsDeviceInfo \
   $(DEVICE_INFO_PACKAGE).HapticsDeviceInfo \
   $(DEVICE_INFO_PACKAGE).InputDeviceInfo \
+  $(DEVICE_INFO_PACKAGE).KeystoreAttestationDeviceInfo \
   $(DEVICE_INFO_PACKAGE).LocaleDeviceInfo \
   $(DEVICE_INFO_PACKAGE).MediaDeviceInfo \
   $(DEVICE_INFO_PACKAGE).MemoryDeviceInfo \
diff --git a/common/device-side/device-info/src/com/android/compatibility/common/deviceinfo/KeystoreAttestationDeviceInfo.java b/common/device-side/device-info/src/com/android/compatibility/common/deviceinfo/KeystoreAttestationDeviceInfo.java
index a951429..6fd3a6c 100644
--- a/common/device-side/device-info/src/com/android/compatibility/common/deviceinfo/KeystoreAttestationDeviceInfo.java
+++ b/common/device-side/device-info/src/com/android/compatibility/common/deviceinfo/KeystoreAttestationDeviceInfo.java
@@ -158,24 +158,28 @@
         }
         if (keyDetailsList.attestationIdBrand.isPresent()) {
             localStore.addResult(
-                    "id_brand", new String(keyDetailsList.attestationIdBrand.get(), UTF_8));
+                    "attestation_id_brand",
+                    new String(keyDetailsList.attestationIdBrand.get(), UTF_8));
         }
         if (keyDetailsList.attestationIdDevice.isPresent()) {
             localStore.addResult(
-                    "id_device", new String(keyDetailsList.attestationIdDevice.get(), UTF_8));
+                    "attestation_id_device",
+                    new String(keyDetailsList.attestationIdDevice.get(), UTF_8));
         }
         if (keyDetailsList.attestationIdProduct.isPresent()) {
             localStore.addResult(
-                    "id_product", new String(keyDetailsList.attestationIdProduct.get(), UTF_8));
+                    "attestation_id_product",
+                    new String(keyDetailsList.attestationIdProduct.get(), UTF_8));
         }
         if (keyDetailsList.attestationIdManufacturer.isPresent()) {
             localStore.addResult(
-                    "build_manufacturer",
+                    "attestation_id_manufacturer",
                     new String(keyDetailsList.attestationIdManufacturer.get(), UTF_8));
         }
         if (keyDetailsList.attestationIdModel.isPresent()) {
             localStore.addResult(
-                    "build_model", new String(keyDetailsList.attestationIdModel.get(), UTF_8));
+                    "attestation_id_model",
+                    new String(keyDetailsList.attestationIdModel.get(), UTF_8));
         }
         if (keyDetailsList.vendorPatchLevel.isPresent()) {
             localStore.addResult("vendor_patch_level", keyDetailsList.vendorPatchLevel.get());
diff --git a/hostsidetests/appsecurity/Android.bp b/hostsidetests/appsecurity/Android.bp
index de8c4e2..09d79d4 100644
--- a/hostsidetests/appsecurity/Android.bp
+++ b/hostsidetests/appsecurity/Android.bp
@@ -25,14 +25,12 @@
         "src/**/*.java",
         "src/**/*.kt",
     ],
-
     libs: [
         "cts-tradefed",
         "tradefed",
         "compatibility-host-util",
         "truth-prebuilt",
         "hamcrest-library",
-        "sts-host-util",
     ],
 
     static_libs: [
@@ -40,6 +38,7 @@
         "CtsPkgInstallerConstants",
         "cts-host-utils",
         "cts-statsd-atom-host-test-utils",
+        "sts-host-util",
     ],
 
     java_resource_dirs: ["res"],
diff --git a/hostsidetests/appsecurity/test-apps/ListeningPortsApp/src/android/appsecurity/cts/listeningports/ListeningPortsTest.java b/hostsidetests/appsecurity/test-apps/ListeningPortsApp/src/android/appsecurity/cts/listeningports/ListeningPortsTest.java
index 072effb..b674fc2 100644
--- a/hostsidetests/appsecurity/test-apps/ListeningPortsApp/src/android/appsecurity/cts/listeningports/ListeningPortsTest.java
+++ b/hostsidetests/appsecurity/test-apps/ListeningPortsApp/src/android/appsecurity/cts/listeningports/ListeningPortsTest.java
@@ -95,6 +95,22 @@
         EXCEPTION_PATTERNS.add("0.0.0.0:68");
     }
 
+    private static final List<String> OEM_EXCEPTION_PATTERNS = new ArrayList<String>();
+
+    static {
+        // PTP vendor OEM service
+        OEM_EXCEPTION_PATTERNS.add("0.0.0.0:319");
+        OEM_EXCEPTION_PATTERNS.add("0.0.0.0:320");
+    }
+
+    private static boolean isOemUid(int uid) {
+        return (uid >= 2900 && uid <= 2999) || (uid >= 5000 && uid <= 5999);
+    }
+
+    private boolean isTv() {
+        return getContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_LEANBACK);
+    }
+
     /**
      * Remotely accessible ports (loopback==false) are often used by
      * attackers to gain unauthorized access to computers systems without
@@ -113,6 +129,8 @@
         final boolean isTcp = Boolean.valueOf(testArgs.getString(IS_TCP_PARAM));
         final boolean loopback = Boolean.valueOf(testArgs.getString(LOOPBACK_PARAM));
 
+        final boolean tv = isTv();
+
         String errors = "";
         List<ParsedProcEntry> entries = ParsedProcEntry.parse(procFileContents);
         for (ParsedProcEntry entry : entries) {
@@ -122,6 +140,7 @@
 
             if (isPortListening(entry.state, isTcp)
                     && !(isException(addrPort) || isException(addrUid) || isException(addrPortUid))
+                    && !(tv && isOemUid(entry.uid) && isOemException(addrPort))
                     && (!entry.localAddress.isLoopbackAddress() ^ loopback)) {
                 if (isTcp && !isTcpConnectable(entry.localAddress, entry.port)) {
                     continue;
@@ -190,6 +209,10 @@
         return isPatternMatch(EXCEPTION_PATTERNS, localAddress);
     }
 
+    private static boolean isOemException(String localAddress) {
+        return isPatternMatch(OEM_EXCEPTION_PATTERNS, localAddress);
+    }
+
     private static boolean isPatternMatch(List<String> patterns, String input) {
         for (String pattern : patterns) {
             pattern = Pattern.quote(pattern);
diff --git a/hostsidetests/securitybulletin/Android.bp b/hostsidetests/securitybulletin/Android.bp
index 803ceca..3727e80 100644
--- a/hostsidetests/securitybulletin/Android.bp
+++ b/hostsidetests/securitybulletin/Android.bp
@@ -26,11 +26,11 @@
         "general-tests",
         "sts",
     ],
+    static_libs: ["sts-host-util"],
     // Must match the package name in CtsTestCaseList.mk
     libs: [
         "compatibility-host-util",
         "cts-tradefed",
-        "sts-host-util",
         "tradefed",
     ],
     data: [
diff --git a/hostsidetests/theme/assets/33/520dpi.zip b/hostsidetests/theme/assets/33/520dpi.zip
new file mode 100644
index 0000000..95a5c9c
--- /dev/null
+++ b/hostsidetests/theme/assets/33/520dpi.zip
Binary files differ
diff --git a/hostsidetests/usb/OWNERS b/hostsidetests/usb/OWNERS
new file mode 100644
index 0000000..1015e40
--- /dev/null
+++ b/hostsidetests/usb/OWNERS
@@ -0,0 +1,4 @@
+# Bug component: 175220
+aprasath@google.com
+sarup@google.com
+kumarashishg@google.com
\ No newline at end of file
diff --git a/libs/json/fuzzers/Android.bp b/libs/json/fuzzers/Android.bp
index d6f7f0d..ab887e9 100644
--- a/libs/json/fuzzers/Android.bp
+++ b/libs/json/fuzzers/Android.bp
@@ -16,11 +16,13 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
-java_fuzz_host {
+java_fuzz {
     name: "json-reader-fuzzer",
     srcs: [
         "JsonReaderFuzzer.java",
     ],
+    host_supported: true,
+    device_supported: false,
     static_libs: [
         "jazzer",
         "jsonlib",
diff --git a/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityOverlayTest.java b/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityOverlayTest.java
index 6cd2b9b..8989f52 100644
--- a/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityOverlayTest.java
+++ b/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityOverlayTest.java
@@ -16,14 +16,18 @@
 
 package android.accessibilityservice.cts;
 
+import static android.accessibilityservice.cts.utils.ActivityLaunchUtils.launchActivityOnSpecifiedDisplayAndWaitForItToBeOnscreen;
+
 import static org.junit.Assert.assertTrue;
 
 import android.accessibility.cts.common.AccessibilityDumpOnFailureRule;
 import android.accessibility.cts.common.InstrumentedAccessibilityService;
 import android.accessibility.cts.common.InstrumentedAccessibilityServiceTestRule;
 import android.accessibilityservice.AccessibilityServiceInfo;
+import android.accessibilityservice.cts.activities.AccessibilityWindowQueryActivity;
 import android.accessibilityservice.cts.utils.AsyncUtils;
 import android.accessibilityservice.cts.utils.DisplayUtils;
+import android.app.Instrumentation;
 import android.app.UiAutomation;
 import android.content.Context;
 import android.text.TextUtils;
@@ -36,8 +40,6 @@
 import androidx.test.platform.app.InstrumentationRegistry;
 import androidx.test.runner.AndroidJUnit4;
 
-import com.android.compatibility.common.util.TestUtils;
-
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -52,6 +54,7 @@
 @RunWith(AndroidJUnit4.class)
 public class AccessibilityOverlayTest {
 
+    private static Instrumentation sInstrumentation;
     private static UiAutomation sUiAutomation;
     InstrumentedAccessibilityService mService;
 
@@ -69,7 +72,8 @@
 
     @BeforeClass
     public static void oneTimeSetUp() {
-        sUiAutomation = InstrumentationRegistry.getInstrumentation()
+        sInstrumentation = InstrumentationRegistry.getInstrumentation();
+        sUiAutomation = sInstrumentation
                 .getUiAutomation(UiAutomation.FLAG_DONT_SUPPRESS_ACCESSIBILITY_SERVICES);
         AccessibilityServiceInfo info = sUiAutomation.getServiceInfo();
         info.flags |= AccessibilityServiceInfo.FLAG_RETRIEVE_INTERACTIVE_WINDOWS;
@@ -104,20 +108,16 @@
                     mService, false);
             final int displayId = newDisplay.getDisplayId();
             final String overlayTitle = "Overlay title on virtualDisplay";
-            // Make sure the onDisplayAdded callback of a11y framework handled by checking if the
-            // accessibilityWindowInfo list of the virtual display has been added.
-            // And the a11y default token is available after the onDisplayAdded callback handled.
-            TestUtils.waitUntil("AccessibilityWindowInfo list of the virtual display are not ready",
-                    () -> {
-                        final SparseArray<List<AccessibilityWindowInfo>> allWindows =
-                                sUiAutomation.getWindowsOnAllDisplays();
-                        return allWindows.get(displayId) != null;
-                    }
-            );
-            final Context newDisplayContext = mService.createDisplayContext(newDisplay);
+
+            // Create an initial activity window on the virtual display to ensure that
+            // AccessibilityWindowManager is tracking windows for the display.
+            launchActivityOnSpecifiedDisplayAndWaitForItToBeOnscreen(sInstrumentation,
+                    sUiAutomation,
+                    AccessibilityWindowQueryActivity.class,
+                    displayId);
 
             sUiAutomation.executeAndWaitForEvent(() -> mService.runOnServiceSync(() -> {
-                addOverlayWindow(newDisplayContext, overlayTitle);
+                addOverlayWindow(mService.createDisplayContext(newDisplay), overlayTitle);
             }), (event) -> findOverlayWindow(displayId) != null, AsyncUtils.DEFAULT_TIMEOUT_MS);
 
             assertTrue(TextUtils.equals(findOverlayWindow(displayId).getTitle(), overlayTitle));
diff --git a/tests/app/src/android/app/cts/ActivityManagerMemoryClassTest.java b/tests/app/src/android/app/cts/ActivityManagerMemoryClassTest.java
index a49f4a5..f9074ee 100644
--- a/tests/app/src/android/app/cts/ActivityManagerMemoryClassTest.java
+++ b/tests/app/src/android/app/cts/ActivityManagerMemoryClassTest.java
@@ -80,6 +80,8 @@
             expectedMemorySizeForWatch.put(DisplayMetrics.DENSITY_560, 112);
             expectedMemorySizeForWatch.put(DisplayMetrics.DENSITY_600, 138);
             expectedMemorySizeForWatch.put(DisplayMetrics.DENSITY_XXXHIGH, 154);
+            // Backport of DENSITY_520 from Android 14 to android13-tests-dev
+            expectedMemorySizeForWatch.put(520, 112);
         }
 
         static {
@@ -105,6 +107,8 @@
             expectedMemorySizeForSmallNormalScreen.put(DisplayMetrics.DENSITY_560, 192);
             expectedMemorySizeForSmallNormalScreen.put(DisplayMetrics.DENSITY_600, 228);
             expectedMemorySizeForSmallNormalScreen.put(DisplayMetrics.DENSITY_XXXHIGH, 256);
+            // Backport of DENSITY_520 from Android 14 to android13-tests-dev
+            expectedMemorySizeForSmallNormalScreen.put(520, 192);
         }
 
         static {
@@ -130,6 +134,8 @@
             expectedMemorySizeForLargeScreen.put(DisplayMetrics.DENSITY_560, 384);
             expectedMemorySizeForLargeScreen.put(DisplayMetrics.DENSITY_600, 448);
             expectedMemorySizeForLargeScreen.put(DisplayMetrics.DENSITY_XXXHIGH, 512);
+            // Backport of DENSITY_520 from Android 14 to android13-tests-dev
+            expectedMemorySizeForLargeScreen.put(520, 384);
         }
 
         static {
@@ -155,6 +161,8 @@
             expectedMemorySizeForXLargeScreen.put(DisplayMetrics.DENSITY_560, 576);
             expectedMemorySizeForXLargeScreen.put(DisplayMetrics.DENSITY_600, 672);
             expectedMemorySizeForXLargeScreen.put(DisplayMetrics.DENSITY_XXXHIGH, 768);
+            // Backport of DENSITY_520 from Android 14 to android13-tests-dev
+            expectedMemorySizeForXLargeScreen.put(520, 576);
         }
 
         public static Integer getExpectedMemorySize(
diff --git a/tests/app/src/android/app/cts/OWNERS b/tests/app/src/android/app/cts/OWNERS
index dfa8ebb..e9918c6 100644
--- a/tests/app/src/android/app/cts/OWNERS
+++ b/tests/app/src/android/app/cts/OWNERS
@@ -1,3 +1,2 @@
-# Bug component: 803062
-per-file Nearby*=file:platform/frameworks/base:/packages/SystemUI/OWNERS
-per-file UpdateMediaTapToTransfer*.kt=file:platform/frameworks/base:/packages/SystemUI/OWNERS
+# Bug component: 803062
+per-file Nearby*, UpdateMediaTapToTransfer*.kt = file:platform/frameworks/base:/packages/SystemUI/OWNERS
diff --git a/tests/autofillservice/src/android/autofillservice/cts/DuplicateIdActivityTest.java b/tests/autofillservice/src/android/autofillservice/cts/DuplicateIdActivityTest.java
index f821d68..b3f0731 100644
--- a/tests/autofillservice/src/android/autofillservice/cts/DuplicateIdActivityTest.java
+++ b/tests/autofillservice/src/android/autofillservice/cts/DuplicateIdActivityTest.java
@@ -106,6 +106,8 @@
     @Test
     public void testDoNotRestoreDuplicateAutofillIds() throws Exception {
         assumeTrue("Rotation is supported", Helper.isRotationSupported(mContext));
+        assumeTrue("Device state is not REAR_DISPLAY",
+                !Helper.isDeviceInState(mContext, Helper.DeviceStateEnum.REAR_DISPLAY));
 
         enableService();
 
diff --git a/tests/autofillservice/src/android/autofillservice/cts/SessionLifecycleTest.java b/tests/autofillservice/src/android/autofillservice/cts/SessionLifecycleTest.java
index e8a58bd..29163a2 100644
--- a/tests/autofillservice/src/android/autofillservice/cts/SessionLifecycleTest.java
+++ b/tests/autofillservice/src/android/autofillservice/cts/SessionLifecycleTest.java
@@ -166,6 +166,8 @@
     @Test
     public void testDatasetAuthResponseWhileAutofilledAppIsLifecycled() throws Exception {
         assumeTrue("Rotation is supported", Helper.isRotationSupported(mContext));
+        assumeTrue("Device state is not REAR_DISPLAY",
+                !Helper.isDeviceInState(mContext, Helper.DeviceStateEnum.REAR_DISPLAY));
         final ActivityManager activityManager = (ActivityManager) getContext()
                 .getSystemService(Context.ACTIVITY_SERVICE);
         assumeFalse(activityManager.isLowRamDevice());
diff --git a/tests/autofillservice/src/android/autofillservice/cts/augmented/AugmentedLoginActivityTest.java b/tests/autofillservice/src/android/autofillservice/cts/augmented/AugmentedLoginActivityTest.java
index 537a1fe..b369c41 100644
--- a/tests/autofillservice/src/android/autofillservice/cts/augmented/AugmentedLoginActivityTest.java
+++ b/tests/autofillservice/src/android/autofillservice/cts/augmented/AugmentedLoginActivityTest.java
@@ -162,6 +162,8 @@
     @Test
     @AppModeFull(reason = "testAutoFill_mainServiceReturnedNull_augmentedAutofillOneField enough")
     public void testAutoFill_neitherServiceCanAutofill_thenManualRequest() throws Exception {
+        assumeTrue("Device state is not REAR_DISPLAY",
+                !Helper.isDeviceInState(mContext, Helper.DeviceStateEnum.REAR_DISPLAY));
         // Set services
         enableService();
         enableAugmentedService();
@@ -734,6 +736,8 @@
     @AppModeFull(reason = "testAutoFill_mainServiceReturnedNull_augmentedAutofillOneField enough")
     public void testAugmentedAutoFill_rotateDevice() throws Exception {
         assumeTrue("Rotation is supported", Helper.isRotationSupported(mContext));
+        assumeTrue("Device state is not REAR_DISPLAY",
+                !Helper.isDeviceInState(mContext, Helper.DeviceStateEnum.REAR_DISPLAY));
 
         // Set services
         enableService();
diff --git a/tests/autofillservice/src/android/autofillservice/cts/commontests/AutoFillServiceTestCase.java b/tests/autofillservice/src/android/autofillservice/cts/commontests/AutoFillServiceTestCase.java
index c1f743b..e07cdd9 100644
--- a/tests/autofillservice/src/android/autofillservice/cts/commontests/AutoFillServiceTestCase.java
+++ b/tests/autofillservice/src/android/autofillservice/cts/commontests/AutoFillServiceTestCase.java
@@ -25,6 +25,8 @@
 
 import static com.android.compatibility.common.util.ShellUtils.runShellCommand;
 
+import static org.junit.Assume.assumeFalse;
+
 import android.app.PendingIntent;
 import android.autofillservice.cts.R;
 import android.autofillservice.cts.activities.AbstractAutoFillActivity;
@@ -425,6 +427,9 @@
             // Collapse notifications.
             runShellCommand("cmd statusbar collapse");
 
+            assumeFalse("Device is half-folded",
+                    Helper.isDeviceInState(mContext, Helper.DeviceStateEnum.HALF_FOLDED));
+
             // Set orientation as portrait, otherwise some tests might fail due to elements not
             // fitting in, IME orientation, etc...
             mUiBot.setScreenOrientation(UiBot.PORTRAIT);
diff --git a/tests/autofillservice/src/android/autofillservice/cts/commontests/CustomDescriptionWithLinkTestCase.java b/tests/autofillservice/src/android/autofillservice/cts/commontests/CustomDescriptionWithLinkTestCase.java
index 55587ae..b612274 100644
--- a/tests/autofillservice/src/android/autofillservice/cts/commontests/CustomDescriptionWithLinkTestCase.java
+++ b/tests/autofillservice/src/android/autofillservice/cts/commontests/CustomDescriptionWithLinkTestCase.java
@@ -93,6 +93,8 @@
     @Test
     public final void testTapLink_changeOrientationThenTapBack() throws Exception {
         assumeTrue("Rotation is supported", Helper.isRotationSupported(mContext));
+        assumeTrue("Device state is not REAR_DISPLAY",
+                !Helper.isDeviceInState(mContext, Helper.DeviceStateEnum.REAR_DISPLAY));
 
         mUiBot.assumeMinimumResolution(500);
         mUiBot.setScreenOrientation(UiBot.PORTRAIT);
diff --git a/tests/autofillservice/src/android/autofillservice/cts/saveui/SimpleSaveActivityTest.java b/tests/autofillservice/src/android/autofillservice/cts/saveui/SimpleSaveActivityTest.java
index 75000ce..6b6235a 100644
--- a/tests/autofillservice/src/android/autofillservice/cts/saveui/SimpleSaveActivityTest.java
+++ b/tests/autofillservice/src/android/autofillservice/cts/saveui/SimpleSaveActivityTest.java
@@ -287,6 +287,8 @@
     @Test
     public void testSave_afterRotation() throws Exception {
         assumeTrue("Rotation is supported", Helper.isRotationSupported(mContext));
+        assumeTrue("Device state is not REAR_DISPLAY",
+                !Helper.isDeviceInState(mContext, Helper.DeviceStateEnum.REAR_DISPLAY));
         mUiBot.setScreenOrientation(UiBot.PORTRAIT);
         try {
             saveTest(true);
diff --git a/tests/autofillservice/src/android/autofillservice/cts/testcore/Helper.java b/tests/autofillservice/src/android/autofillservice/cts/testcore/Helper.java
index fc34db6..8f5841b 100644
--- a/tests/autofillservice/src/android/autofillservice/cts/testcore/Helper.java
+++ b/tests/autofillservice/src/android/autofillservice/cts/testcore/Helper.java
@@ -48,6 +48,8 @@
 import android.content.pm.PackageManager;
 import android.content.res.Resources;
 import android.graphics.Bitmap;
+import android.hardware.devicestate.DeviceStateManager;
+import android.hardware.devicestate.DeviceStateManager.DeviceStateCallback;
 import android.icu.util.Calendar;
 import android.os.Bundle;
 import android.os.Environment;
@@ -92,6 +94,7 @@
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -1739,6 +1742,97 @@
         throw new UnsupportedOperationException("contain static methods only");
     }
 
+    public enum DeviceStateEnum {
+        HALF_FOLDED,
+        REAR_DISPLAY
+    };
+
+    /**
+     * Test if the device is in half-folded or rear display state.
+     */
+    private static final class DeviceStateAssessor implements DeviceStateCallback {
+        DeviceStateManager mDeviceStateManager;
+        int[] mHalfFoldedStates;
+        int[] mRearDisplayStates;
+        int mCurrentState = -1;
+
+        DeviceStateAssessor(Context context) {
+            Resources systemRes = Resources.getSystem();
+            mHalfFoldedStates = getStatesFromConfig(systemRes, "config_halfFoldedDeviceStates");
+            mRearDisplayStates = getStatesFromConfig(systemRes, "config_rearDisplayDeviceStates");
+            try {
+                mDeviceStateManager = context.getSystemService(DeviceStateManager.class);
+                mDeviceStateManager.registerCallback(context.getMainExecutor(), this);
+                Log.v(TAG, "DeviceStateAssessor initialized halfFoldedStates.length="
+                        + mHalfFoldedStates.length + ", rearDisplayStates.length="
+                        + mRearDisplayStates.length);
+            } catch (java.lang.IllegalStateException e) {
+                Log.v(TAG, "DeviceStateManager not available: cannot check for half-fold");
+            }
+        }
+
+        private int[] getStatesFromConfig(Resources systemRes, String configKey) {
+            int statesArrayIdentifier = systemRes.getIdentifier(configKey, "array", "android");
+            if (statesArrayIdentifier == 0) {
+                return new int[0];
+            } else {
+                return systemRes.getIntArray(statesArrayIdentifier);
+            }
+        }
+
+        public void onStateChanged(int state) {
+            synchronized (this) {
+                mCurrentState = state;
+                this.notify();
+            }
+        }
+
+        void close() {
+            if (mDeviceStateManager != null) {
+                mDeviceStateManager.unregisterCallback(this);
+            }
+        }
+
+        boolean isDeviceInState(DeviceStateEnum deviceState) throws InterruptedException {
+            int[] states;
+            switch(deviceState) {
+                case HALF_FOLDED:
+                    states = mHalfFoldedStates;
+                    break;
+                case REAR_DISPLAY:
+                    states = mRearDisplayStates;
+                    break;
+                default:
+                    return false;
+            }
+            if (states.length == 0 || mDeviceStateManager == null) {
+                return false;
+            }
+            synchronized (this) {
+                if (mCurrentState == -1) {
+                    this.wait(1000);
+                }
+            }
+            if (mCurrentState == -1) {
+                Log.w(TAG, "DeviceStateCallback not called within 1 second");
+            }
+            Log.v(TAG, "Current state=" + mCurrentState + ", states[0]="
+                    + states[0]);
+            return Arrays.stream(states).anyMatch(x -> x == mCurrentState);
+        }
+    }
+
+    public static boolean isDeviceInState(Context context, DeviceStateEnum deviceState) {
+        DeviceStateAssessor deviceStateAssessor = new DeviceStateAssessor(context);
+        try {
+            return deviceStateAssessor.isDeviceInState(deviceState);
+        } catch (InterruptedException e) {
+            return false;
+        } finally {
+            deviceStateAssessor.close();
+        }
+    }
+
     public static class FieldClassificationResult {
         public final AutofillId id;
         public final String[] categoryIds;
diff --git a/tests/camera/src/android/hardware/camera2/cts/AllocationTest.java b/tests/camera/src/android/hardware/camera2/cts/AllocationTest.java
deleted file mode 100644
index a3e6256..0000000
--- a/tests/camera/src/android/hardware/camera2/cts/AllocationTest.java
+++ /dev/null
@@ -1,914 +0,0 @@
-/*
- * Copyright 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2.cts;
-
-import static android.graphics.ImageFormat.YUV_420_888;
-import static android.hardware.camera2.cts.helpers.Preconditions.*;
-import static android.hardware.camera2.cts.helpers.AssertHelpers.*;
-import static android.hardware.camera2.cts.CameraTestUtils.*;
-import static com.android.ex.camera2.blocking.BlockingStateCallback.*;
-import static junit.framework.Assert.*;
-
-import android.content.Context;
-import android.graphics.ImageFormat;
-import android.graphics.RectF;
-
-import android.hardware.camera2.cts.Camera2ParameterizedTestCase;
-import android.hardware.camera2.CameraAccessException;
-import android.hardware.camera2.CameraCaptureSession;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CameraDevice;
-import android.hardware.camera2.CameraManager;
-import android.hardware.camera2.CameraMetadata;
-import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.CaptureResult;
-import android.hardware.camera2.TotalCaptureResult;
-import android.hardware.camera2.params.ColorSpaceTransform;
-import android.hardware.camera2.params.RggbChannelVector;
-import android.hardware.camera2.params.StreamConfigurationMap;
-import android.util.Size;
-import android.hardware.camera2.cts.helpers.MaybeNull;
-import android.hardware.camera2.cts.helpers.StaticMetadata;
-import android.hardware.camera2.cts.rs.RenderScriptSingleton;
-import android.hardware.camera2.cts.rs.ScriptGraph;
-import android.hardware.camera2.cts.rs.ScriptYuvCrop;
-import android.hardware.camera2.cts.rs.ScriptYuvMeans1d;
-import android.hardware.camera2.cts.rs.ScriptYuvMeans2dTo1d;
-import android.hardware.camera2.cts.rs.ScriptYuvToRgb;
-import android.os.Handler;
-import android.os.HandlerThread;
-import android.renderscript.Allocation;
-import android.renderscript.Script.LaunchOptions;
-import android.util.Log;
-import android.util.Rational;
-import android.view.Surface;
-
-import androidx.test.InstrumentationRegistry;
-
-import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
-import com.android.ex.camera2.blocking.BlockingStateCallback;
-import com.android.ex.camera2.blocking.BlockingSessionCallback;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.Test;
-
-/**
- * Suite of tests for camera2 -> RenderScript APIs.
- *
- * <p>It uses CameraDevice as producer, camera sends the data to the surface provided by
- * Allocation. Only the below format is tested:</p>
- *
- * <p>YUV_420_888: flexible YUV420, it is a mandatory format for camera.</p>
- */
-
-@RunWith(Parameterized.class)
-public class AllocationTest extends Camera2ParameterizedTestCase {
-    private static final String TAG = "AllocationTest";
-    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
-
-    private CameraDevice mCamera;
-    private CameraCaptureSession mSession;
-    private BlockingStateCallback mCameraListener;
-    private BlockingSessionCallback mSessionListener;
-
-
-    private Handler mHandler;
-    private HandlerThread mHandlerThread;
-
-    private CameraIterable mCameraIterable;
-    private SizeIterable mSizeIterable;
-    private ResultIterable mResultIterable;
-
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        mHandlerThread = new HandlerThread("AllocationTest");
-        mHandlerThread.start();
-        mHandler = new Handler(mHandlerThread.getLooper());
-        mCameraListener = new BlockingStateCallback();
-
-        mCameraIterable = new CameraIterable();
-        mSizeIterable = new SizeIterable();
-        mResultIterable = new ResultIterable();
-
-        RenderScriptSingleton.setContext(mContext);
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        MaybeNull.close(mCamera);
-        RenderScriptSingleton.clearContext();
-        mHandlerThread.quitSafely();
-        mHandler = null;
-        super.tearDown();
-    }
-
-    /**
-     * Update the request with a default manual request template.
-     *
-     * @param request A builder for a CaptureRequest
-     * @param sensitivity ISO gain units (e.g. 100)
-     * @param expTimeNs Exposure time in nanoseconds
-     */
-    private static void setManualCaptureRequest(CaptureRequest.Builder request, int sensitivity,
-            long expTimeNs) {
-        final Rational ONE = new Rational(1, 1);
-        final Rational ZERO = new Rational(0, 1);
-
-        if (VERBOSE) {
-            Log.v(TAG, String.format("Create manual capture request, sensitivity = %d, expTime = %f",
-                    sensitivity, expTimeNs / (1000.0 * 1000)));
-        }
-
-        request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
-        request.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
-        request.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_OFF);
-        request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
-        request.set(CaptureRequest.CONTROL_EFFECT_MODE, CaptureRequest.CONTROL_EFFECT_MODE_OFF);
-        request.set(CaptureRequest.SENSOR_FRAME_DURATION, 0L);
-        request.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
-        request.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTimeNs);
-        request.set(CaptureRequest.COLOR_CORRECTION_MODE,
-                CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
-
-        // Identity transform
-        request.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
-            new ColorSpaceTransform(new Rational[] {
-                ONE, ZERO, ZERO,
-                ZERO, ONE, ZERO,
-                ZERO, ZERO, ONE
-            }));
-
-        // Identity gains
-        request.set(CaptureRequest.COLOR_CORRECTION_GAINS,
-                new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f ));
-        request.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_FAST);
-    }
-
-    /**
-     * Calculate the absolute crop window from a {@link Size},
-     * and configure {@link LaunchOptions} for it.
-     */
-    // TODO: split patch crop window and the application against a particular size into 2 classes
-    public static class Patch {
-        /**
-         * Create a new {@link Patch} from relative crop coordinates.
-         *
-         * <p>All float values must be normalized coordinates between [0, 1].</p>
-         *
-         * @param size Size of the original rectangle that is being cropped.
-         * @param xNorm The X coordinate defining the left side of the rectangle (in [0, 1]).
-         * @param yNorm The Y coordinate defining the top side of the rectangle (in [0, 1]).
-         * @param wNorm The width of the crop rectangle (normalized between [0, 1]).
-         * @param hNorm The height of the crop rectangle (normalized between [0, 1]).
-         *
-         * @throws NullPointerException if size was {@code null}.
-         * @throws AssertionError if any of the normalized coordinates were out of range
-         */
-        public Patch(Size size, float xNorm, float yNorm, float wNorm, float hNorm) {
-            checkNotNull("size", size);
-
-            assertInRange(xNorm, 0.0f, 1.0f);
-            assertInRange(yNorm, 0.0f, 1.0f);
-            assertInRange(wNorm, 0.0f, 1.0f);
-            assertInRange(hNorm, 0.0f, 1.0f);
-
-            wFull = size.getWidth();
-            hFull = size.getWidth();
-
-            xTile = (int)Math.ceil(xNorm * wFull);
-            yTile = (int)Math.ceil(yNorm * hFull);
-
-            wTile = (int)Math.ceil(wNorm * wFull);
-            hTile = (int)Math.ceil(hNorm * hFull);
-
-            mSourceSize = size;
-        }
-
-        /**
-         * Get the original size used to create this {@link Patch}.
-         *
-         * @return source size
-         */
-        public Size getSourceSize() {
-            return mSourceSize;
-        }
-
-        /**
-         * Get the cropped size after applying the normalized crop window.
-         *
-         * @return cropped size
-         */
-        public Size getSize() {
-            return new Size(wFull, hFull);
-        }
-
-        /**
-         * Get the {@link LaunchOptions} that can be used with a {@link android.renderscript.Script}
-         * to apply a kernel over a subset of an {@link Allocation}.
-         *
-         * @return launch options
-         */
-        public LaunchOptions getLaunchOptions() {
-            return (new LaunchOptions())
-                    .setX(xTile, xTile + wTile)
-                    .setY(yTile, yTile + hTile);
-        }
-
-        /**
-         * Get the cropped width after applying the normalized crop window.
-         *
-         * @return cropped width
-         */
-        public int getWidth() {
-            return wTile;
-        }
-
-        /**
-         * Get the cropped height after applying the normalized crop window.
-         *
-         * @return cropped height
-         */
-        public int getHeight() {
-            return hTile;
-        }
-
-        /**
-         * Convert to a {@link RectF} where each corner is represented by a
-         * normalized coordinate in between [0.0, 1.0] inclusive.
-         *
-         * @return a new rectangle
-         */
-        public RectF toRectF() {
-            return new RectF(
-                    xTile * 1.0f / wFull,
-                    yTile * 1.0f / hFull,
-                    (xTile + wTile) * 1.0f / wFull,
-                    (yTile + hTile) * 1.0f / hFull);
-        }
-
-        private final Size mSourceSize;
-        private final int wFull;
-        private final int hFull;
-        private final int xTile;
-        private final int yTile;
-        private final int wTile;
-        private final int hTile;
-    }
-
-    /**
-     * Convert a single YUV pixel (3 byte elements) to an RGB pixel.
-     *
-     * <p>The color channels must be in the following order:
-     * <ul><li>Y - 0th channel
-     * <li>U - 1st channel
-     * <li>V - 2nd channel
-     * </ul></p>
-     *
-     * <p>Each channel has data in the range 0-255.</p>
-     *
-     * <p>Output data is a 3-element pixel with each channel in the range of [0,1].
-     * Each channel is saturated to avoid over/underflow.</p>
-     *
-     * <p>The conversion is done using JFIF File Interchange Format's "Conversion to and from RGB":
-     * <ul>
-     * <li>R = Y + 1.042 (Cr - 128)
-     * <li>G = Y - 0.34414 (Cb - 128) - 0.71414 (Cr - 128)
-     * <li>B = Y + 1.772 (Cb - 128)
-     * </ul>
-     *
-     * Where Cr and Cb are aliases of V and U respectively.
-     * </p>
-     *
-     * @param yuvData An array of a YUV pixel (at least 3 bytes large)
-     *
-     * @return an RGB888 pixel with each channel in the range of [0,1]
-     */
-    private static float[] convertPixelYuvToRgb(byte[] yuvData) {
-        final int CHANNELS = 3; // yuv
-        final float COLOR_RANGE = 255f;
-
-        assertTrue("YUV pixel must be at least 3 bytes large", CHANNELS <= yuvData.length);
-
-        float[] rgb = new float[CHANNELS];
-
-        float y = yuvData[0] & 0xFF;  // Y channel
-        float cb = yuvData[1] & 0xFF; // U channel
-        float cr = yuvData[2] & 0xFF; // V channel
-
-        // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
-        float r = y + 1.402f * (cr - 128);
-        float g = y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128);
-        float b = y + 1.772f * (cb - 128);
-
-        // normalize [0,255] -> [0,1]
-        rgb[0] = r / COLOR_RANGE;
-        rgb[1] = g / COLOR_RANGE;
-        rgb[2] = b / COLOR_RANGE;
-
-        // Clamp to range [0,1]
-        for (int i = 0; i < CHANNELS; ++i) {
-            rgb[i] = Math.max(0.0f, Math.min(1.0f, rgb[i]));
-        }
-
-        if (VERBOSE) {
-            Log.v(TAG, String.format("RGB calculated (r,g,b) = (%f, %f, %f)", rgb[0], rgb[1],
-                    rgb[2]));
-        }
-
-        return rgb;
-    }
-
-    /**
-     * Configure the camera with the target surface;
-     * create a capture request builder with {@code cameraTarget} as the sole surface target.
-     *
-     * <p>Outputs are configured with the new surface targets, and this function blocks until
-     * the camera has finished configuring.</p>
-     *
-     * <p>The capture request is created from the {@link CameraDevice#TEMPLATE_PREVIEW} template.
-     * No other keys are set.
-     * </p>
-     */
-    private CaptureRequest.Builder configureAndCreateRequestForSurface(Surface cameraTarget)
-            throws CameraAccessException {
-        List<Surface> outputSurfaces = new ArrayList<Surface>(/*capacity*/1);
-        assertNotNull("Failed to get Surface", cameraTarget);
-        outputSurfaces.add(cameraTarget);
-
-        mSessionListener = new BlockingSessionCallback();
-        mCamera.createCaptureSession(outputSurfaces, mSessionListener, mHandler);
-        mSession = mSessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
-        CaptureRequest.Builder captureBuilder =
-                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
-        assertNotNull("Fail to create captureRequest", captureBuilder);
-        captureBuilder.addTarget(cameraTarget);
-
-        if (VERBOSE) Log.v(TAG, "configureAndCreateRequestForSurface - done");
-
-        return captureBuilder;
-    }
-
-    /**
-     * Submit a single request to the camera, block until the buffer is available.
-     *
-     * <p>Upon return from this function, script has been executed against the latest buffer.
-     * </p>
-     */
-    private void captureSingleShotAndExecute(CaptureRequest request, ScriptGraph graph)
-            throws CameraAccessException {
-        checkNotNull("request", request);
-        checkNotNull("graph", graph);
-
-        long exposureTimeNs = -1;
-        int controlMode = -1;
-        int aeMode = -1;
-        if (request.get(CaptureRequest.CONTROL_MODE) != null) {
-            controlMode = request.get(CaptureRequest.CONTROL_MODE);
-        }
-        if (request.get(CaptureRequest.CONTROL_AE_MODE) != null) {
-            aeMode = request.get(CaptureRequest.CONTROL_AE_MODE);
-        }
-        if ((request.get(CaptureRequest.SENSOR_EXPOSURE_TIME) != null) &&
-                ((controlMode == CaptureRequest.CONTROL_MODE_OFF) ||
-                 (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF))) {
-            exposureTimeNs = request.get(CaptureRequest.SENSOR_EXPOSURE_TIME);
-        }
-        mSession.capture(request, new CameraCaptureSession.CaptureCallback() {
-            @Override
-            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
-                    TotalCaptureResult result) {
-                if (VERBOSE) Log.v(TAG, "Capture completed");
-            }
-        }, mHandler);
-
-        if (VERBOSE) Log.v(TAG, "Waiting for single shot buffer");
-        if (exposureTimeNs > 0) {
-            graph.advanceInputWaiting(
-                    java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(exposureTimeNs));
-        } else {
-            graph.advanceInputWaiting();
-        }
-        if (VERBOSE) Log.v(TAG, "Got the buffer");
-        graph.execute();
-    }
-
-    private void stopCapture() throws CameraAccessException {
-        if (VERBOSE) Log.v(TAG, "Stopping capture and waiting for idle");
-        // Stop repeat, wait for captures to complete, and disconnect from surfaces
-        mSession.close();
-        mSessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_CLOSED,
-                SESSION_CLOSE_TIMEOUT_MS);
-        mSession = null;
-        mSessionListener = null;
-    }
-
-    /**
-     * Extremely dumb validator. Makes sure there is at least one non-zero RGB pixel value.
-     */
-    private void validateInputOutputNotZeroes(ScriptGraph scriptGraph, Size size) {
-        final int BPP = 8; // bits per pixel
-
-        int width = size.getWidth();
-        int height = size.getHeight();
-        /**
-         * Check the input allocation is valid.
-         * - Byte size matches what we expect.
-         * - The input is not all zeroes.
-         */
-
-        // Check that input data was updated first. If it wasn't, the rest of the test will fail.
-        byte[] data = scriptGraph.getInputData();
-        assertArrayNotAllZeroes("Input allocation data was not updated", data);
-
-        // Minimal required size to represent YUV 4:2:0 image
-        int packedSize =
-                width * height * ImageFormat.getBitsPerPixel(YUV_420_888) / BPP;
-        if (VERBOSE) Log.v(TAG, "Expected image size = " + packedSize);
-        int actualSize = data.length;
-        // Actual size may be larger due to strides or planes being non-contiguous
-        assertTrue(
-                String.format(
-                        "YUV 420 packed size (%d) should be at least as large as the actual size " +
-                        "(%d)", packedSize, actualSize), packedSize <= actualSize);
-        /**
-         * Check the output allocation by converting to RGBA.
-         * - Byte size matches what we expect
-         * - The output is not all zeroes
-         */
-        final int RGBA_CHANNELS = 4;
-
-        int actualSizeOut = scriptGraph.getOutputAllocation().getBytesSize();
-        int packedSizeOut = width * height * RGBA_CHANNELS;
-
-        byte[] dataOut = scriptGraph.getOutputData();
-        assertEquals("RGB mismatched byte[] and expected size",
-                packedSizeOut, dataOut.length);
-
-        if (VERBOSE) {
-            Log.v(TAG, "checkAllocationByConvertingToRgba - RGB data size " + dataOut.length);
-        }
-
-        assertArrayNotAllZeroes("RGBA data was not updated", dataOut);
-        // RGBA8888 stride should be equal to the width
-        assertEquals("RGBA 8888 mismatched byte[] and expected size", packedSizeOut, actualSizeOut);
-
-        if (VERBOSE) Log.v(TAG, "validating Buffer , size = " + actualSize);
-    }
-
-    @Test
-    public void testAllocationFromCameraFlexibleYuv() throws Exception {
-
-        /** number of frame (for streaming requests) to be verified. */
-        final int NUM_FRAME_VERIFIED = 1;
-
-        mCameraIterable.forEachCamera(new CameraBlock() {
-            @Override
-            public void run(CameraDevice camera) throws CameraAccessException {
-
-                // Iterate over each size in the camera
-                mSizeIterable.forEachSize(YUV_420_888, new SizeBlock() {
-                    @Override
-                    public void run(final Size size) throws CameraAccessException {
-                        // Create a script graph that converts YUV to RGB
-                        try (ScriptGraph scriptGraph = ScriptGraph.create()
-                                .configureInputWithSurface(size, YUV_420_888)
-                                .chainScript(ScriptYuvToRgb.class)
-                                .buildGraph()) {
-
-                            if (VERBOSE) Log.v(TAG, "Prepared ScriptYuvToRgb for size " + size);
-
-                            // Run the graph against camera input and validate we get some input
-                            CaptureRequest request =
-                                    configureAndCreateRequestForSurface(scriptGraph.getInputSurface()).build();
-
-                            // Block until we get 1 result, then iterate over the result
-                            mResultIterable.forEachResultRepeating(
-                                    request, NUM_FRAME_VERIFIED, new ResultBlock() {
-                                @Override
-                                public void run(CaptureResult result) throws CameraAccessException {
-                                    scriptGraph.advanceInputWaiting();
-                                    scriptGraph.execute();
-                                    validateInputOutputNotZeroes(scriptGraph, size);
-                                    scriptGraph.advanceInputAndDrop();
-                                }
-                            });
-
-                            stopCapture();
-                            if (VERBOSE) Log.v(TAG, "Cleanup Renderscript cache");
-                            scriptGraph.close();
-                            RenderScriptSingleton.clearContext();
-                            RenderScriptSingleton.setContext(mContext);
-                        }
-                    }
-                });
-            }
-        });
-    }
-
-    /**
-     * Take two shots and ensure per-frame-control with exposure/gain is working correctly.
-     *
-     * <p>Takes a shot with very low ISO and exposure time. Expect it to be black.</p>
-     *
-     * <p>Take a shot with very high ISO and exposure time. Expect it to be white.</p>
-     *
-     * @throws Exception
-     */
-    @Test
-    public void testBlackWhite() throws CameraAccessException {
-
-        /** low iso + low exposure (first shot) */
-        final float THRESHOLD_LOW = 0.025f;
-        /** high iso + high exposure (second shot) */
-        final float THRESHOLD_HIGH = 0.975f;
-
-        mCameraIterable.forEachCamera(/*fullHwLevel*/false, new CameraBlock() {
-            @Override
-            public void run(CameraDevice camera) throws CameraAccessException {
-                final StaticMetadata staticInfo =
-                        new StaticMetadata(mCameraManager.getCameraCharacteristics(camera.getId()));
-
-                // This test requires PFC and manual sensor control
-                if (!staticInfo.isCapabilitySupported(
-                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) ||
-                        !staticInfo.isPerFrameControlSupported()) {
-                    return;
-                }
-
-                final Size maxSize = getMaxSize(
-                        getSupportedSizeForFormat(YUV_420_888, camera.getId(), mCameraManager));
-
-                try (ScriptGraph scriptGraph = createGraphForYuvCroppedMeans(maxSize)) {
-
-                    CaptureRequest.Builder req =
-                            configureAndCreateRequestForSurface(scriptGraph.getInputSurface());
-
-                    // Take a shot with very low ISO and exposure time. Expect it to be black.
-                    int minimumSensitivity = staticInfo.getSensitivityMinimumOrDefault();
-                    long minimumExposure = staticInfo.getExposureMinimumOrDefault();
-                    setManualCaptureRequest(req, minimumSensitivity, minimumExposure);
-
-                    CaptureRequest lowIsoExposureShot = req.build();
-                    captureSingleShotAndExecute(lowIsoExposureShot, scriptGraph);
-
-                    float[] blackMeans = convertPixelYuvToRgb(scriptGraph.getOutputData());
-
-                    // Take a shot with very high ISO and exposure time. Expect it to be white.
-                    int maximumSensitivity = staticInfo.getSensitivityMaximumOrDefault();
-                    long maximumExposure = staticInfo.getExposureMaximumOrDefault();
-                    setManualCaptureRequest(req, maximumSensitivity, maximumExposure);
-
-                    CaptureRequest highIsoExposureShot = req.build();
-                    captureSingleShotAndExecute(highIsoExposureShot, scriptGraph);
-
-                    float[] whiteMeans = convertPixelYuvToRgb(scriptGraph.getOutputData());
-
-                    // Low iso + low exposure (first shot), just check and log the error.
-                    for (int i = 0; i < blackMeans.length; ++i) {
-                        if (blackMeans[i] >= THRESHOLD_LOW) {
-                            Log.e(TAG,
-                                    String.format("Black means too high: (%s should be greater"
-                                            + " than %s; item index %d in %s)", blackMeans[i],
-                                            THRESHOLD_LOW, i,
-                                            Arrays.toString(blackMeans)));
-                        }
-                    }
-
-                    // High iso + high exposure (second shot), just check and log the error
-                    for (int i = 0; i < whiteMeans.length; ++i) {
-                        if (whiteMeans[i] <= THRESHOLD_HIGH) {
-                            Log.e(TAG,
-                                    String.format("White means too low: (%s should be less than"
-                                            + " %s; item index %d in %s)", whiteMeans[i],
-                                            THRESHOLD_HIGH, i,
-                                            Arrays.toString(whiteMeans)));
-                        }
-                    }
-                }
-            }
-        });
-    }
-
-    /**
-     * Test that the android.sensitivity.parameter is applied.
-     */
-    @Test
-    public void testParamSensitivity() throws CameraAccessException {
-        final float THRESHOLD_MAX_MIN_DIFF = 0.3f;
-        final float THRESHOLD_MAX_MIN_RATIO = 2.0f;
-        final int NUM_STEPS = 5;
-        final long EXPOSURE_TIME_NS = 2000000; // 2 ms
-        final int RGB_CHANNELS = 3;
-
-        mCameraIterable.forEachCamera(/*fullHwLevel*/false, new CameraBlock() {
-
-
-            @Override
-            public void run(CameraDevice camera) throws CameraAccessException {
-                final StaticMetadata staticInfo =
-                        new StaticMetadata(mCameraManager.getCameraCharacteristics(camera.getId()));
-                // This test requires PFC and manual sensor control
-                if (!staticInfo.isCapabilitySupported(
-                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) ||
-                        !staticInfo.isPerFrameControlSupported()) {
-                    return;
-                }
-
-                final List<float[]> rgbMeans = new ArrayList<float[]>();
-                final Size maxSize = getMaxSize(
-                        getSupportedSizeForFormat(YUV_420_888, camera.getId(), mCameraManager));
-
-                final int sensitivityMin = staticInfo.getSensitivityMinimumOrDefault();
-                final int sensitivityMax = staticInfo.getSensitivityMaximumOrDefault();
-
-                // List each sensitivity from min-max in NUM_STEPS increments
-                int[] sensitivities = new int[NUM_STEPS];
-                for (int i = 0; i < NUM_STEPS; ++i) {
-                    int delta = (sensitivityMax - sensitivityMin) / (NUM_STEPS - 1);
-                    sensitivities[i] = sensitivityMin + delta * i;
-                }
-
-                try (ScriptGraph scriptGraph = createGraphForYuvCroppedMeans(maxSize)) {
-
-                    CaptureRequest.Builder req =
-                            configureAndCreateRequestForSurface(scriptGraph.getInputSurface());
-
-                    // Take burst shots with increasing sensitivity one after other.
-                    for (int i = 0; i < NUM_STEPS; ++i) {
-                        setManualCaptureRequest(req, sensitivities[i], EXPOSURE_TIME_NS);
-                        captureSingleShotAndExecute(req.build(), scriptGraph);
-                        float[] means = convertPixelYuvToRgb(scriptGraph.getOutputData());
-                        rgbMeans.add(means);
-
-                        if (VERBOSE) {
-                            Log.v(TAG, "testParamSensitivity - captured image " + i +
-                                    " with RGB means: " + Arrays.toString(means));
-                        }
-                    }
-
-                    // Test that every consecutive image gets brighter.
-                    for (int i = 0; i < rgbMeans.size() - 1; ++i) {
-                        float[] curMeans = rgbMeans.get(i);
-                        float[] nextMeans = rgbMeans.get(i+1);
-
-                        float[] left = curMeans;
-                        float[] right = nextMeans;
-                        String leftString = Arrays.toString(left);
-                        String rightString = Arrays.toString(right);
-
-                        String msgHeader =
-                                String.format("Shot with sensitivity %d should not have higher " +
-                                "average means than shot with sensitivity %d",
-                                sensitivities[i], sensitivities[i+1]);
-                        for (int m = 0; m < left.length; ++m) {
-                            String msg = String.format(
-                                    "%s: (%s should be less than or equal to %s; item index %d;"
-                                    + " left = %s; right = %s)",
-                                    msgHeader, left[m], right[m], m, leftString, rightString);
-                            if (left[m] > right[m]) {
-                                Log.e(TAG, msg);
-                            }
-                        }
-                    }
-
-                    // Test the min-max diff and ratios are within expected thresholds
-                    float[] lastMeans = rgbMeans.get(NUM_STEPS - 1);
-                    float[] firstMeans = rgbMeans.get(/*location*/0);
-                    for (int i = 0; i < RGB_CHANNELS; ++i) {
-                        if (lastMeans[i] - firstMeans[i] <= THRESHOLD_MAX_MIN_DIFF) {
-                            Log.w(TAG, String.format("Sensitivity max-min diff too small"
-                                    + "(max=%f, min=%f)", lastMeans[i], firstMeans[i]));
-                        }
-                        if (lastMeans[i] / firstMeans[i] <= THRESHOLD_MAX_MIN_RATIO) {
-                            Log.w(TAG, String.format("Sensitivity max-min ratio too small"
-                                    + "(max=%f, min=%f)", lastMeans[i], firstMeans[i]));
-                        }
-                    }
-                }
-            }
-        });
-
-    }
-
-    /**
-     * Common script graph for manual-capture based tests that determine the average pixel
-     * values of a cropped sub-region.
-     *
-     * <p>Processing chain:
-     *
-     * <pre>
-     * input:  YUV_420_888 surface
-     * output: mean YUV value of a central section of the image,
-     *         YUV 4:4:4 encoded as U8_3
-     * steps:
-     *      1) crop [0.45,0.45] - [0.55, 0.55]
-     *      2) average columns
-     *      3) average rows
-     * </pre>
-     * </p>
-     */
-    private static ScriptGraph createGraphForYuvCroppedMeans(final Size size) {
-        ScriptGraph scriptGraph = ScriptGraph.create()
-                .configureInputWithSurface(size, YUV_420_888)
-                .configureScript(ScriptYuvCrop.class)
-                    .set(ScriptYuvCrop.CROP_WINDOW,
-                            new Patch(size, /*x*/0.45f, /*y*/0.45f, /*w*/0.1f, /*h*/0.1f).toRectF())
-                    .buildScript()
-                .chainScript(ScriptYuvMeans2dTo1d.class)
-                .chainScript(ScriptYuvMeans1d.class)
-                // TODO: Make a script for YUV 444 -> RGB 888 conversion
-                .buildGraph();
-        return scriptGraph;
-    }
-
-    /*
-     * TODO: Refactor below code into separate classes and to not depend on AllocationTest
-     * inner variables.
-     *
-     * TODO: add javadocs to below methods
-     *
-     * TODO: Figure out if there's some elegant way to compose these forEaches together, so that
-     * the callers don't have to do a ton of nesting
-     */
-
-    interface CameraBlock {
-        void run(CameraDevice camera) throws CameraAccessException;
-    }
-
-    class CameraIterable {
-        public void forEachCamera(CameraBlock runnable)
-                throws CameraAccessException {
-            forEachCamera(/*fullHwLevel*/false, runnable);
-        }
-
-        public void forEachCamera(boolean fullHwLevel, CameraBlock runnable)
-                throws CameraAccessException {
-            assertNotNull("No camera manager", mCameraManager);
-            assertNotNull("No camera IDs", mCameraIdsUnderTest);
-
-            for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
-                // Don't execute the runnable against non-FULL cameras if FULL is required
-                CameraCharacteristics properties =
-                        mCameraManager.getCameraCharacteristics(mCameraIdsUnderTest[i]);
-                StaticMetadata staticInfo = new StaticMetadata(properties);
-                if (fullHwLevel && !staticInfo.isHardwareLevelAtLeastFull()) {
-                    Log.i(TAG, String.format(
-                            "Skipping this test for camera %s, needs FULL hw level",
-                            mCameraIdsUnderTest[i]));
-                    continue;
-                }
-                if (!staticInfo.isColorOutputSupported()) {
-                    Log.i(TAG, String.format(
-                        "Skipping this test for camera %s, does not support regular outputs",
-                        mCameraIdsUnderTest[i]));
-                    continue;
-                }
-                // Open camera and execute test
-                Log.i(TAG, "Testing Camera " + mCameraIdsUnderTest[i]);
-                try {
-                    openDevice(mCameraIdsUnderTest[i]);
-
-                    runnable.run(mCamera);
-                } finally {
-                    closeDevice(mCameraIdsUnderTest[i]);
-                }
-            }
-        }
-
-        private void openDevice(String cameraId) {
-            if (mCamera != null) {
-                throw new IllegalStateException("Already have open camera device");
-            }
-            try {
-                mCamera = openCamera(
-                    mCameraManager, cameraId, mCameraListener, mHandler);
-            } catch (CameraAccessException e) {
-                fail("Fail to open camera synchronously, " + Log.getStackTraceString(e));
-            } catch (BlockingOpenException e) {
-                fail("Fail to open camera asynchronously, " + Log.getStackTraceString(e));
-            }
-            mCameraListener.waitForState(STATE_OPENED, CAMERA_OPEN_TIMEOUT_MS);
-        }
-
-        private void closeDevice(String cameraId) {
-            if (mCamera != null) {
-                mCamera.close();
-                mCameraListener.waitForState(STATE_CLOSED, CAMERA_CLOSE_TIMEOUT_MS);
-                mCamera = null;
-            }
-        }
-    }
-
-    interface SizeBlock {
-        void run(Size size) throws CameraAccessException;
-    }
-
-    class SizeIterable {
-        public void forEachSize(int format, SizeBlock runnable) throws CameraAccessException {
-            assertNotNull("No camera opened", mCamera);
-            assertNotNull("No camera manager", mCameraManager);
-
-            CameraCharacteristics properties =
-                    mCameraManager.getCameraCharacteristics(mCamera.getId());
-
-            assertNotNull("Can't get camera properties!", properties);
-
-            StreamConfigurationMap config =
-                    properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
-            int[] availableOutputFormats = config.getOutputFormats();
-            assertArrayNotEmpty(availableOutputFormats,
-                    "availableOutputFormats should not be empty");
-            Arrays.sort(availableOutputFormats);
-            assertTrue("Can't find the format " + format + " in supported formats " +
-                    Arrays.toString(availableOutputFormats),
-                    Arrays.binarySearch(availableOutputFormats, format) >= 0);
-
-            Size[] availableSizes = getSupportedSizeForFormat(format, mCamera.getId(),
-                    mCameraManager);
-            assertArrayNotEmpty(availableSizes, "availableSizes should not be empty");
-
-            for (Size size : availableSizes) {
-
-                if (VERBOSE) {
-                    Log.v(TAG, "Testing size " + size.toString() +
-                            " for camera " + mCamera.getId());
-                }
-                runnable.run(size);
-            }
-        }
-    }
-
-    interface ResultBlock {
-        void run(CaptureResult result) throws CameraAccessException;
-    }
-
-    class ResultIterable {
-        public void forEachResultOnce(CaptureRequest request, ResultBlock block)
-                throws CameraAccessException {
-            forEachResult(request, /*count*/1, /*repeating*/false, block);
-        }
-
-        public void forEachResultRepeating(CaptureRequest request, int count, ResultBlock block)
-                throws CameraAccessException {
-            forEachResult(request, count, /*repeating*/true, block);
-        }
-
-        public void forEachResult(CaptureRequest request, int count, boolean repeating,
-                ResultBlock block) throws CameraAccessException {
-
-            // TODO: start capture, i.e. configureOutputs
-
-            SimpleCaptureCallback listener = new SimpleCaptureCallback();
-
-            if (!repeating) {
-                for (int i = 0; i < count; ++i) {
-                    mSession.capture(request, listener, mHandler);
-                }
-            } else {
-                mSession.setRepeatingRequest(request, listener, mHandler);
-            }
-
-            // Assume that the device is already IDLE.
-            mSessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_ACTIVE,
-                    CAMERA_ACTIVE_TIMEOUT_MS);
-
-            for (int i = 0; i < count; ++i) {
-                if (VERBOSE) {
-                    Log.v(TAG, String.format("Testing with result %d of %d for camera %s",
-                            i, count, mCamera.getId()));
-                }
-
-                CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
-                block.run(result);
-            }
-
-            if (repeating) {
-                mSession.stopRepeating();
-                mSessionListener.getStateWaiter().waitForState(
-                    BlockingSessionCallback.SESSION_READY, CAMERA_IDLE_TIMEOUT_MS);
-            }
-
-            // TODO: Make a Configure decorator or some such for configureOutputs
-        }
-    }
-}
diff --git a/tests/camera/src/android/hardware/camera2/cts/DngCreatorTest.java b/tests/camera/src/android/hardware/camera2/cts/DngCreatorTest.java
index 816ee6c6..acb5919 100644
--- a/tests/camera/src/android/hardware/camera2/cts/DngCreatorTest.java
+++ b/tests/camera/src/android/hardware/camera2/cts/DngCreatorTest.java
@@ -38,7 +38,6 @@
 import android.hardware.camera2.cts.helpers.StaticMetadata;
 import android.hardware.camera2.cts.rs.BitmapUtils;
 import android.hardware.camera2.cts.rs.RawConverter;
-import android.hardware.camera2.cts.rs.RenderScriptSingleton;
 import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
 import android.hardware.camera2.params.InputConfiguration;
 import android.location.Location;
@@ -135,18 +134,6 @@
         Bitmap rawBitmap;
     }
 
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        RenderScriptSingleton.setContext(mContext);
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        RenderScriptSingleton.clearContext();
-        super.tearDown();
-    }
-
     /**
      * Test basic raw capture and DNG saving functionality for each of the available cameras.
      *
@@ -533,7 +520,7 @@
                 raw.getPlanes()[0].getBuffer().get(rawPlane);
                 raw.getPlanes()[0].getBuffer().rewind();
 
-                RawConverter.convertToSRGB(RenderScriptSingleton.getRS(), raw.getWidth(),
+                RawConverter.convertToSRGB(raw.getWidth(),
                         raw.getHeight(), raw.getPlanes()[0].getRowStride(), rawPlane,
                         data.characteristics, /*captureREsult*/data.raw.second, /*offsetX*/ 0,
                         /*offsetY*/ 0, /*out*/ rawBitmap);
@@ -617,7 +604,7 @@
                 raw.getPlanes()[0].getBuffer().get(rawPlane);
                 raw.getPlanes()[0].getBuffer().rewind();
 
-                RawConverter.convertToSRGB(RenderScriptSingleton.getRS(), raw.getWidth(),
+                RawConverter.convertToSRGB(raw.getWidth(),
                         raw.getHeight(), raw.getPlanes()[0].getRowStride(), rawPlane,
                         data.characteristics, data.imagePair.second, /*offsetX*/ 0, /*offsetY*/ 0,
                         /*out*/ rawBitmap);
diff --git a/tests/camera/src/android/hardware/camera2/cts/rs/RawConverter.java b/tests/camera/src/android/hardware/camera2/cts/rs/RawConverter.java
index 3f50014..1290483 100644
--- a/tests/camera/src/android/hardware/camera2/cts/rs/RawConverter.java
+++ b/tests/camera/src/android/hardware/camera2/cts/rs/RawConverter.java
@@ -20,17 +20,8 @@
 import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CameraMetadata;
 import android.hardware.camera2.CaptureResult;
-import android.hardware.camera2.cts.ScriptC_raw_converter;
 import android.hardware.camera2.params.ColorSpaceTransform;
 import android.hardware.camera2.params.LensShadingMap;
-import android.renderscript.Allocation;
-import android.renderscript.Element;
-import android.renderscript.Float3;
-import android.renderscript.Float4;
-import android.renderscript.Int4;
-import android.renderscript.Matrix3f;
-import android.renderscript.RenderScript;
-import android.renderscript.Type;
 import android.util.Log;
 import android.util.Rational;
 import android.util.SparseIntArray;
@@ -207,6 +198,598 @@
         }
     }
 
+    // Port of RAW16 converter from renderscript to Java.
+    // Comments copied verbatim from raw_converter.rscript
+
+    // This file includes a conversion kernel for RGGB, GRBG, GBRG, and BGGR Bayer patterns.
+    // Applying this script also will apply black-level subtraction, rescaling, clipping,
+    // tonemapping, and color space transforms along with the Bayer demosaic.
+    // See RawConverter.java for more information.
+    static class ConverterKernel {
+
+        // RAW16 buffer of dimensions (raw image stride) * (raw image height)
+        byte[] mInput;
+
+        // Whitelevel of sensor
+        int mWhiteLevel;
+
+        // X offset into inputRawBuffer
+        int mOffsetX;
+
+        // Y offset into inputRawBuffer
+        int mOffsetY;
+
+        // Width of raw buffer
+        int mInputWidth;
+
+        // Height of raw buffer
+        int mInputHeight;
+
+        // Stride of raw buffer
+        int mInputStride;
+
+        // Coefficients for a polynomial tonemapping curve
+        float[/*4*/] mToneMapCoeffs;
+
+        // Does gainmap exist?
+        boolean mHasGainMap;
+
+        // Gainmap to apply to linearized raw sensor data.
+        float[] mGainMap;
+
+        // The width of the gain map
+        int mGainMapWidth;
+
+        // The height of the gain map
+        int mGainMapHeight;
+
+        // Is monochrome camera?
+        boolean mIsMonochrome;
+
+        // Color transform from sensor to a wide-gamut colorspace
+        float[/*9*/] mSensorToIntermediate;
+
+        // Color transform from wide-gamut colorspace to sRGB
+        float[/*9*/] mIntermediateToSRGB;
+
+        // The camera neutral
+        float[/*3*/] mNeutralPoint;
+
+        // The Color Filter Arrangement pattern used
+        int mCfaPattern;
+
+        // Blacklevel to subtract for each channel, given in CFA order
+        int[/*4*/] mBlackLevel;
+
+        ConverterKernel() { }
+
+        void set_inputRawBuffer(byte[] input) {
+            mInput = input;
+        }
+
+        void set_whiteLevel(int whiteLevel) {
+            mWhiteLevel = whiteLevel;
+        }
+
+        void set_offsetX(int offsetX) {
+            mOffsetX = offsetX;
+        }
+
+        void set_offsetY(int offsetY) {
+            mOffsetY = offsetY;
+        }
+
+        void set_rawWidth(int inputWidth) {
+            mInputWidth = inputWidth;
+        }
+
+        void set_rawHeight(int inputHeight) {
+            mInputHeight = inputHeight;
+        }
+
+        void set_rawStride(int inputStride) {
+            mInputStride = inputStride;
+        }
+
+        void set_toneMapCoeffs(float[/*4*/] toneMapCoeffs) {
+            mToneMapCoeffs = toneMapCoeffs;
+        }
+
+        void set_hasGainMap(boolean hasGainMap) {
+            mHasGainMap = hasGainMap;
+        }
+
+        void set_gainMapWidth(int gainMapWidth) {
+            mGainMapWidth = gainMapWidth;
+        }
+
+        void set_gainMapHeight(int gainMapHeight) {
+            mGainMapHeight = gainMapHeight;
+        }
+
+        void set_gainMap(float[] gainMap) {
+            if (gainMap.length != mGainMapWidth * mGainMapHeight * 4) {
+                throw new IllegalArgumentException("Invalid float array of length " + gainMap.length
+                    + ", must be correct size for gainMap of dimensions "
+                    + mGainMapWidth + "x" + mGainMapHeight);
+            }
+            mGainMap = gainMap;
+        }
+
+        void set_isMonochrome(boolean isMonochrome) {
+            mIsMonochrome = isMonochrome;
+        }
+
+        void set_sensorToIntermediate(float[/*9*/] sensorToIntermediate) {
+            mSensorToIntermediate = sensorToIntermediate;
+        }
+
+        void set_intermediateToSRGB(float[/*9*/] intermediateToSRGB) {
+            mIntermediateToSRGB = intermediateToSRGB;
+        }
+
+        void set_neutralPoint(float[/*3*/] neutralPoint) {
+            mNeutralPoint = neutralPoint;
+        }
+
+        void set_cfaPattern(int cfaPattern) {
+            mCfaPattern = cfaPattern;
+        }
+
+        void set_blackLevelPattern(int[/*4*/] blackLevelPattern) {
+            mBlackLevel = blackLevelPattern;
+        }
+
+        private float getGain(int x, int y, int d) {
+            return mGainMap[y * mGainMapWidth * 4 + x * 4 + d];
+        }
+
+        // Interpolate gain map to find per-channel gains at a given pixel
+        private float[/*4*/] getGain(int x, int y) {
+            float interpX = (((float) x) / mInputWidth) * mGainMapWidth;
+            float interpY = (((float) y) / mInputHeight) * mGainMapHeight;
+            int gX = (int) interpX;
+            int gY = (int) interpY;
+            int gXNext = (gX + 1 < mGainMapWidth) ? gX + 1 : gX;
+            int gYNext = (gY + 1 < mGainMapHeight) ? gY + 1 : gY;
+
+            float fracX = interpX - (float) gX;
+            float fracY = interpY - (float) gY;
+            float invFracX = 1.f - fracX;
+            float invFracY = 1.f - fracY;
+
+            float[/*4*/] gain = new float[4];
+
+            for (int d = 0; d < 4; d++) {
+                float tl = getGain(gX, gY, d);
+                float tr = getGain(gXNext, gY, d);
+                float bl = getGain(gX, gYNext, d);
+                float br = getGain(gXNext, gYNext, d);
+
+                gain[d] = tl * invFracX * invFracY
+                        + tr * fracX * invFracY
+                        + bl * invFracX * fracY
+                        + br * fracX * fracY;
+            }
+
+            return gain;
+        }
+
+        // Apply gamma correction using sRGB gamma curve
+        static float gammaEncode(float x) {
+            return x <= 0.0031308f ? x * 12.92f : 1.055f * (float) Math.pow(x, 0.4166667f) - 0.055f;
+        }
+
+        // Apply gamma correction to each color channel in RGB pixel
+        static float[/*3*/] gammaCorrectPixel(float[/*3*/] rgb) {
+            rgb[0] = gammaEncode(rgb[0]);
+            rgb[1] = gammaEncode(rgb[1]);
+            rgb[2] = gammaEncode(rgb[2]);
+            return rgb;
+        }
+
+        static float clamp(float v, float l, float u) {
+            return (float) Math.min(Math.max(l, v), u);
+        }
+
+        static float[/*3*/] matrixMultiply(float[/*9*/] m, float[/*3*/] v) {
+            float x = m[0] * v[0] + m[1] * v[1] + m[2] * v[2];
+            float y = m[3] * v[0] + m[4] * v[1] + m[5] * v[2];
+            float z = m[6] * v[0] + m[7] * v[1] + m[8] * v[2];
+            v[0] = x; v[1] = y; v[2] = z;
+            return v;
+        }
+
+        // Apply a colorspace transform to the intermediate colorspace, apply
+        // a tonemapping curve, apply a colorspace transform to a final colorspace,
+        // and apply a gamma correction curve.
+        private float[/*3*/] applyColorspace(float[/*3*/] pRGB) {
+            pRGB[0] = clamp(pRGB[0], 0.f, mNeutralPoint[0]);
+            pRGB[1] = clamp(pRGB[1], 0.f, mNeutralPoint[1]);
+            pRGB[2] = clamp(pRGB[2], 0.f, mNeutralPoint[2]);
+
+            matrixMultiply(mSensorToIntermediate, pRGB);
+            tonemap(pRGB);
+            matrixMultiply(mIntermediateToSRGB, pRGB);
+
+            pRGB[0] = clamp(pRGB[0], 0.f, 1.f);
+            pRGB[1] = clamp(pRGB[1], 0.f, 1.f);
+            pRGB[2] = clamp(pRGB[2], 0.f, 1.f);
+
+            return gammaCorrectPixel(pRGB);
+        }
+
+        // Apply polynomial tonemapping curve to each color channel in RGB pixel.
+        // This attempts to apply tonemapping without changing the hue of each pixel,
+        // i.e.:
+        //
+        // For some RGB values:
+        // M = max(R, G, B)
+        // m = min(R, G, B)
+        // m' = mid(R, G, B)
+        // chroma = M - m
+        // H = m' - m / chroma
+        //
+        // The relationship H=H' should be preserved, where H and H' are calculated from
+        // the RGB and RGB' value at this pixel before and after this tonemapping
+        // operation has been applied, respectively.
+        private float[/*3*/] tonemap(float[/*3*/] rgb) {
+            rgb[0] = clamp(rgb[0], 0.f, 1.f);
+            rgb[1] = clamp(rgb[1], 0.f, 1.f);
+            rgb[2] = clamp(rgb[2], 0.f, 1.f);
+
+            float tmp;
+            int permutation = 0;
+
+            // Sort the RGB channels by value
+            if (rgb[2] < rgb[1]) {
+                tmp = rgb[2];
+                rgb[2] = rgb[1];
+                rgb[1] = tmp;
+                permutation |= 1;
+            }
+            if (rgb[1] < rgb[0]) {
+                tmp = rgb[1];
+                rgb[1] = rgb[0];
+                rgb[0] = tmp;
+                permutation |= 2;
+            }
+            if (rgb[2] < rgb[1]) {
+                tmp = rgb[2];
+                rgb[2] = rgb[1];
+                rgb[1] = tmp;
+                permutation |= 4;
+            }
+
+            float min = rgb[0];
+            float max = rgb[2];
+
+            // Apply tonemapping curve to min, max RGB channel values
+            min = (float) Math.pow(min, 3.f) * mToneMapCoeffs[0]
+                + (float) Math.pow(min, 2.f) * mToneMapCoeffs[1]
+                + (float) /*Math.pow(min, 1.f)*/min * mToneMapCoeffs[2]
+                + (float) /*Math.pow(min, 0.f)*/1.0 * mToneMapCoeffs[3];
+
+            max = (float) Math.pow(max, 3.f) * mToneMapCoeffs[0]
+                + (float) Math.pow(max, 2.f) * mToneMapCoeffs[1]
+                + (float) /*Math.pow(max, 1.f)*/max * mToneMapCoeffs[2]
+                + (float) /*Math.pow(max, 0.f)*/1.0 * mToneMapCoeffs[3];
+
+            // Rescale middle value
+            float newMid;
+            if (rgb[2] == rgb[0]) {
+                newMid = max;
+            } else {
+                newMid = min + (max - min) * (rgb[1] - rgb[0]) / (rgb[2] - rgb[0]);
+            }
+
+            switch (permutation) {
+                // b >= g >= r
+                case 0 : {
+                    rgb[0] = min;
+                    rgb[1] = newMid;
+                    rgb[2] = max;
+                    break;
+                }
+                // g >= b >= r
+                case 1 : {
+                    rgb[0] = min;
+                    rgb[2] = newMid;
+                    rgb[1] = max;
+                    break;
+                }
+                // b >= r >= g
+                case 2 : {
+                    rgb[1] = min;
+                    rgb[0] = newMid;
+                    rgb[2] = max;
+                    break;
+                }
+                // g >= r >= b
+                case 3 : {
+                    rgb[2] = min;
+                    rgb[0] = newMid;
+                    rgb[1] = max;
+                    break;
+                }
+                // r >= b >= g
+                case 6 : {
+                    rgb[1] = min;
+                    rgb[2] = newMid;
+                    rgb[0] = max;
+                    break;
+                }
+                // r >= g >= b
+                case 7 : {
+                    rgb[2] = min;
+                    rgb[1] = newMid;
+                    rgb[0] = max;
+                    break;
+                }
+                case 4 : // impossible
+                case 5 : // impossible
+                default : {
+                    rgb[0] = 0.f;
+                    rgb[1] = 0.f;
+                    rgb[2] = 0.f;
+                    throw new IllegalStateException("RawConverter: Logic error in tonemap.");
+                }
+            }
+
+            rgb[0] = clamp(rgb[0], 0.f, 1.f);
+            rgb[1] = clamp(rgb[1], 0.f, 1.f);
+            rgb[2] = clamp(rgb[2], 0.f, 1.f);
+
+            return rgb;
+        }
+
+        private float getInput(int x, int y) {
+            // 16-bit raw pixels (little endian: low byte at 2*x, high byte at 2*x + 1)
+            return (Byte.toUnsignedInt(mInput[y * mInputStride + 2 * x + 1]) << 8)
+                + Byte.toUnsignedInt(mInput[y * mInputStride + 2 * x]);
+        }
+
+        // Load a 3x3 patch of pixels into the output.
+        private void load3x3(int x, int y, /*out*/float[/*9*/] outputArray) {
+            outputArray[0] = getInput(x - 1, y - 1);
+            outputArray[1] = getInput(x, y - 1);
+            outputArray[2] = getInput(x + 1, y - 1);
+            outputArray[3] = getInput(x - 1, y);
+            outputArray[4] = getInput(x, y);
+            outputArray[5] = getInput(x + 1, y);
+            outputArray[6] = getInput(x - 1, y + 1);
+            outputArray[7] = getInput(x, y + 1);
+            outputArray[8] = getInput(x + 1, y + 1);
+        }
+
+        // Blacklevel subtract, and normalize each pixel in the outputArray, and apply the
+        // gain map.
+        void linearizeAndGainmap(int x, int y, /*inout*/float[/*9*/] outputArray) {
+            int kk = 0;
+            for (int j = y - 1; j <= y + 1; j++) {
+                for (int i = x - 1; i <= x + 1; i++) {
+                    int index = (i & 1) | ((j & 1) << 1);  // bits [0,1] are blacklevel offset
+                    index |= (mCfaPattern << 2);  // bits [2,3] are cfa
+                    float bl = 0.f;
+                    float g = 1.f;
+                    float[/*4*/] gains = new float[]{1.f, 1.f, 1.f, 1.f};
+                    if (mHasGainMap) {
+                        gains = getGain(i, j);
+                    }
+                    switch (index) {
+                        // RGGB
+                        case 0 : {
+                            bl = mBlackLevel[0];
+                            g = gains[0];
+                            break;
+                        }
+                        case 1 : {
+                            bl = mBlackLevel[1];
+                            g = gains[1];
+                            break;
+                        }
+                        case 2 : {
+                            bl = mBlackLevel[2];
+                            g = gains[2];
+                            break;
+                        }
+                        case 3 : {
+                            bl = mBlackLevel[3];
+                            g = gains[3];
+                            break;
+                        }
+                        // GRBG
+                        case 4 : {
+                            bl = mBlackLevel[0];
+                            g = gains[1];
+                            break;
+                        }
+                        case 5 : {
+                            bl = mBlackLevel[1];
+                            g = gains[0];
+                            break;
+                        }
+                        case 6 : {
+                            bl = mBlackLevel[2];
+                            g = gains[3];
+                            break;
+                        }
+                        case 7 : {
+                            bl = mBlackLevel[3];
+                            g = gains[2];
+                            break;
+                        }
+                        // GBRG
+                        case 8 : {
+                            bl = mBlackLevel[0];
+                            g = gains[1];
+                            break;
+                        }
+                        case 9 : {
+                            bl = mBlackLevel[1];
+                            g = gains[3];
+                            break;
+                        }
+                        case 10 : {
+                            bl = mBlackLevel[2];
+                            g = gains[0];
+                            break;
+                        }
+                        case 11 : {
+                            bl = mBlackLevel[3];
+                            g = gains[2];
+                            break;
+                        }
+                        // BGGR
+                        case 12 : {
+                            bl = mBlackLevel[0];
+                            g = gains[3];
+                            break;
+                        }
+                        case 13 : {
+                            bl = mBlackLevel[1];
+                            g = gains[1];
+                            break;
+                        }
+                        case 14 : {
+                            bl = mBlackLevel[2];
+                            g = gains[2];
+                            break;
+                        }
+                        case 15 : {
+                            bl = mBlackLevel[3];
+                            g = gains[0];
+                            break;
+                        }
+                    }
+                    outputArray[kk] = clamp(g * (outputArray[kk] - bl) / (mWhiteLevel - bl), 0, 1);
+                    kk++;
+                }
+            }
+        }
+
+        // Apply bilinear-interpolation to demosaic
+        static float[/*3*/] demosaic(int x, int y, int cfa, float[/*9*/] inputArray) {
+            int index = (x & 1) | ((y & 1) << 1);
+            index |= (cfa << 2);
+
+            float[/*3*/] pRGB = new float[3];
+            switch (index) {
+                case 0 :
+                case 5 :
+                case 10 :
+                case 15 : { // Red centered
+                    // B G B
+                    // G R G
+                    // B G B
+                    pRGB[0] = inputArray[4];
+                    pRGB[1] = (inputArray[1] + inputArray[3] + inputArray[5] + inputArray[7]) / 4;
+                    pRGB[2] = (inputArray[0] + inputArray[2] + inputArray[6] + inputArray[8]) / 4;
+                    break;
+                }
+                case 1 :
+                case 4 :
+                case 11 :
+                case 14 : { // Green centered w/ horizontally adjacent Red
+                    // G B G
+                    // R G R
+                    // G B G
+                    pRGB[0] = (inputArray[3] + inputArray[5]) / 2;
+                    pRGB[1] = inputArray[4];
+                    pRGB[2] = (inputArray[1] + inputArray[7]) / 2;
+                    break;
+                }
+                case 2 :
+                case 7 :
+                case 8 :
+                case 13 : { // Green centered w/ horizontally adjacent Blue
+                    // G R G
+                    // B G B
+                    // G R G
+                    pRGB[0] = (inputArray[1] + inputArray[7]) / 2;
+                    pRGB[1] = inputArray[4];
+                    pRGB[2] = (inputArray[3] + inputArray[5]) / 2;
+                    break;
+                }
+                case 3 :
+                case 6 :
+                case 9 :
+                case 12 : { // Blue centered
+                    // R G R
+                    // G B G
+                    // R G R
+                    pRGB[0] = (inputArray[0] + inputArray[2] + inputArray[6] + inputArray[8]) / 4;
+                    pRGB[1] = (inputArray[1] + inputArray[3] + inputArray[5] + inputArray[7]) / 4;
+                    pRGB[2] = inputArray[4];
+                    break;
+                }
+            }
+
+            return pRGB;
+        }
+
+        static int packColorTo8888(float[/*3*/] pRGB) {
+            int a = 255;
+            int r = (int) (pRGB[0] * 255);
+            int g = (int) (pRGB[1] * 255);
+            int b = (int) (pRGB[2] * 255);
+            int color = ((a & 0xff) << 24) | ((r & 0xff) << 16) | ((g & 0xff) << 8) | (b & 0xff);
+            return color;
+        }
+
+        // Full RAW->ARGB bitmap conversion kernel
+        int convert_RAW_To_ARGB(int x, int y) {
+            float[/*3*/] pRGB;
+            int xP = x + mOffsetX;
+            int yP = y + mOffsetY;
+            if (xP == 0) xP = 1;
+            if (yP == 0) yP = 1;
+            if (xP == mInputWidth - 1) xP = mInputWidth - 2;
+            if (yP == mInputHeight - 1) yP = mInputHeight  - 2;
+
+            if (mIsMonochrome) {
+                float pixel = getInput(x, y);
+
+                // Apply linearization and gain map
+                float[/*4*/] gains = new float[]{1.f, 1.f, 1.f, 1.f};
+                if (mHasGainMap) {
+                    gains = getGain(xP, yP);
+                }
+                float bl = mBlackLevel[0];
+                float g = gains[0];
+                pixel = clamp(g * (pixel - bl) / (mWhiteLevel - bl), 0.f, 1.f);
+
+                // Use same Y value for R, G, and B.
+                pRGB = new float[3];
+                pRGB[0] = pRGB[1] = pRGB[2] = pixel;
+
+                // apply tonemap and gamma correction
+                tonemap(pRGB);
+                gammaCorrectPixel(pRGB);
+            } else {
+                float[] patch = new float[9];
+                // TODO: This re-applies the pre-demosaic steps (linearization and gain map)
+                // for every pixel of each 3x3 patch; iterating over whole patches instead
+                // would avoid the redundant work, potentially achieving a 9x speedup here.
+                load3x3(xP, yP, /*out*/ patch);
+                linearizeAndGainmap(xP, yP, /*inout*/patch);
+                pRGB = demosaic(xP, yP, mCfaPattern, patch);
+                applyColorspace(pRGB);
+            }
+
+            return packColorTo8888(pRGB);
+        }
+
+        void forEach_convert_RAW_To_ARGB(Bitmap argbOutput) {
+            for (int j = 0; j < mInputHeight; j++) {
+                for (int i = 0; i < mInputWidth; i++) {
+                    argbOutput.setPixel(i, j, convert_RAW_To_ARGB(i, j));
+                }
+            }
+        }
+
+    }
+
     /**
      * Convert a RAW16 buffer into an sRGB buffer, and write the result into a bitmap.
      *
@@ -259,7 +842,6 @@
      * <p> Arguments given here are assumed to come from the values for the corresponding
      * {@link CameraCharacteristics.Key}s defined for the camera that produced this RAW16 buffer.
      * </p>
-     * @param rs a {@link RenderScript} context to use.
      * @param inputWidth width of the input RAW16 image in pixels.
      * @param inputHeight height of the input RAW16 image in pixels.
      * @param inputStride stride of the input RAW16 image in bytes.
@@ -275,7 +857,7 @@
      *                   the dimensions and offset of the output rectangle contained in the RAW
      *                   image to be rendered.
      */
-    public static void convertToSRGB(RenderScript rs, int inputWidth, int inputHeight,
+    public static void convertToSRGB(int inputWidth, int inputHeight,
             int inputStride, byte[] rawImageInput, CameraCharacteristics staticMetadata,
             CaptureResult dynamicMetadata, int outputOffsetX, int outputOffsetY,
             /*out*/Bitmap argbOutput) {
@@ -294,7 +876,7 @@
         if (!isMono) {
             dngBayerMetadata = new DngBayerMetadata(staticMetadata, dynamicMetadata);
         }
-        convertToSRGB(rs, inputWidth, inputHeight, inputStride, cfa, blackLevelPattern,
+        convertToSRGB(inputWidth, inputHeight, inputStride, cfa, blackLevelPattern,
                 whiteLevel, rawImageInput, dngBayerMetadata,
                 shadingMap, outputOffsetX, outputOffsetY, argbOutput);
     }
@@ -304,13 +886,13 @@
      *
      * @see #convertToSRGB
      */
-    private static void convertToSRGB(RenderScript rs, int inputWidth, int inputHeight,
+    private static void convertToSRGB(int inputWidth, int inputHeight,
             int inputStride, int cfa, int[] blackLevelPattern, int whiteLevel, byte[] rawImageInput,
             DngBayerMetadata dngBayerMetadata, LensShadingMap lensShadingMap,
             int outputOffsetX, int outputOffsetY, /*out*/Bitmap argbOutput) {
 
         // Validate arguments
-        if (argbOutput == null || rs == null || rawImageInput == null) {
+        if (argbOutput == null || rawImageInput == null) {
             throw new IllegalArgumentException("Null argument to convertToSRGB");
         }
         if (argbOutput.getConfig() != Bitmap.Config.ARGB_8888) {
@@ -346,14 +928,6 @@
             Log.d(TAG, "WhiteLevel: " + whiteLevel);
         }
 
-        Allocation gainMap = null;
-        if (lensShadingMap != null) {
-            float[] lsm = new float[lensShadingMap.getGainFactorCount()];
-            lensShadingMap.copyGainFactors(/*inout*/lsm, /*offset*/0);
-            gainMap = createFloat4Allocation(rs, lsm, lensShadingMap.getColumnCount(),
-                    lensShadingMap.getRowCount());
-        }
-
         float[] sensorToProPhoto = new float[9];
         float[] proPhotoToSRGB = new float[9];
         if (dngBayerMetadata != null) {
@@ -420,73 +994,37 @@
             multiply(sXYZtoRGBBradford, sProPhotoToXYZ, /*out*/proPhotoToSRGB);
         }
 
-        Allocation output = Allocation.createFromBitmap(rs, argbOutput);
-
-        // Setup input allocation (16-bit raw pixels)
-        Type.Builder typeBuilder = new Type.Builder(rs, Element.U16(rs));
-        typeBuilder.setX((inputStride / 2));
-        typeBuilder.setY(inputHeight);
-        Type inputType = typeBuilder.create();
-        Allocation input = Allocation.createTyped(rs, inputType);
-        input.copyFromUnchecked(rawImageInput);
-
-        // Setup RS kernel globals
-        ScriptC_raw_converter converterKernel = new ScriptC_raw_converter(rs);
-        converterKernel.set_inputRawBuffer(input);
+        ConverterKernel converterKernel = new ConverterKernel();
+        converterKernel.set_inputRawBuffer(rawImageInput);
         converterKernel.set_whiteLevel(whiteLevel);
         converterKernel.set_offsetX(outputOffsetX);
         converterKernel.set_offsetY(outputOffsetY);
         converterKernel.set_rawHeight(inputHeight);
         converterKernel.set_rawWidth(inputWidth);
-        converterKernel.set_toneMapCoeffs(new Float4(DEFAULT_ACR3_TONEMAP_CURVE_COEFFS[0],
-                DEFAULT_ACR3_TONEMAP_CURVE_COEFFS[1], DEFAULT_ACR3_TONEMAP_CURVE_COEFFS[2],
-                DEFAULT_ACR3_TONEMAP_CURVE_COEFFS[3]));
-        converterKernel.set_hasGainMap(gainMap != null);
-        if (gainMap != null) {
-            converterKernel.set_gainMap(gainMap);
+        converterKernel.set_rawStride(inputStride);
+        converterKernel.set_toneMapCoeffs(DEFAULT_ACR3_TONEMAP_CURVE_COEFFS);
+        converterKernel.set_hasGainMap(lensShadingMap != null);
+        if (lensShadingMap != null) {
+            float[] gainMap = new float[lensShadingMap.getGainFactorCount()];
+            lensShadingMap.copyGainFactors(/*inout*/gainMap, /*offset*/0);
             converterKernel.set_gainMapWidth(lensShadingMap.getColumnCount());
             converterKernel.set_gainMapHeight(lensShadingMap.getRowCount());
+            converterKernel.set_gainMap(gainMap);
         }
 
         converterKernel.set_isMonochrome(dngBayerMetadata == null);
         if (dngBayerMetadata != null) {
-            converterKernel.set_sensorToIntermediate(new Matrix3f(transpose(sensorToProPhoto)));
-            converterKernel.set_intermediateToSRGB(new Matrix3f(transpose(proPhotoToSRGB)));
+            converterKernel.set_sensorToIntermediate(sensorToProPhoto);
+            converterKernel.set_intermediateToSRGB(proPhotoToSRGB);
             converterKernel.set_neutralPoint(
-                    new Float3(dngBayerMetadata.neutralColorPoint[0].floatValue(),
+                    new float[]{dngBayerMetadata.neutralColorPoint[0].floatValue(),
                     dngBayerMetadata.neutralColorPoint[1].floatValue(),
-                    dngBayerMetadata.neutralColorPoint[2].floatValue()));
+                    dngBayerMetadata.neutralColorPoint[2].floatValue()});
         }
 
         converterKernel.set_cfaPattern(cfa);
-        converterKernel.set_blackLevelPattern(new Int4(blackLevelPattern[0],
-                blackLevelPattern[1], blackLevelPattern[2], blackLevelPattern[3]));
-        converterKernel.forEach_convert_RAW_To_ARGB(output);
-        output.copyTo(argbOutput);  // Force RS sync with bitmap (does not do an extra copy).
-    }
-
-    /**
-     * Create a float-backed renderscript {@link Allocation} with the given dimensions, containing
-     * the contents of the given float array.
-     *
-     * @param rs a {@link RenderScript} context to use.
-     * @param fArray the float array to copy into the {@link Allocation}.
-     * @param width the width of the {@link Allocation}.
-     * @param height the height of the {@link Allocation}.
-     * @return an {@link Allocation} containing the given floats.
-     */
-    private static Allocation createFloat4Allocation(RenderScript rs, float[] fArray,
-                                                    int width, int height) {
-        if (fArray.length != width * height * 4) {
-            throw new IllegalArgumentException("Invalid float array of length " + fArray.length +
-                    ", must be correct size for Allocation of dimensions " + width + "x" + height);
-        }
-        Type.Builder builder = new Type.Builder(rs, Element.F32_4(rs));
-        builder.setX(width);
-        builder.setY(height);
-        Allocation fAlloc = Allocation.createTyped(rs, builder.create());
-        fAlloc.copyFrom(fArray);
-        return fAlloc;
+        converterKernel.set_blackLevelPattern(blackLevelPattern);
+        converterKernel.forEach_convert_RAW_To_ARGB(argbOutput);
     }
 
     /**
diff --git a/tests/camera/src/android/hardware/camera2/cts/rs/raw_converter.rscript b/tests/camera/src/android/hardware/camera2/cts/rs/raw_converter.rscript
deleted file mode 100644
index 6b2ad0c..0000000
--- a/tests/camera/src/android/hardware/camera2/cts/rs/raw_converter.rscript
+++ /dev/null
@@ -1,394 +0,0 @@
-/*
- * Copyright 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "../common.rscript"
-
-// This file includes a conversion kernel for RGGB, GRBG, GBRG, and BGGR Bayer patterns.
-// Applying this script also will apply black-level subtraction, rescaling, clipping, tonemapping,
-// and color space transforms along with the Bayer demosaic.  See RawConverter.java
-// for more information.
-
-// Input globals
-
-rs_allocation inputRawBuffer; // RAW16 buffer of dimensions (raw image stride) * (raw image height)
-rs_allocation gainMap; // Gainmap to apply to linearized raw sensor data.
-uint cfaPattern; // The Color Filter Arrangement pattern used
-uint gainMapWidth;  // The width of the gain map
-uint gainMapHeight;  // The height of the gain map
-bool hasGainMap; // Does gainmap exist?
-bool isMonochrome;  // Is monochrome camera?
-rs_matrix3x3 sensorToIntermediate; // Color transform from sensor to a wide-gamut colorspace
-rs_matrix3x3 intermediateToSRGB; // Color transform from wide-gamut colorspace to sRGB
-ushort4 blackLevelPattern; // Blacklevel to subtract for each channel, given in CFA order
-int whiteLevel;  // Whitelevel of sensor
-uint offsetX; // X offset into inputRawBuffer
-uint offsetY; // Y offset into inputRawBuffer
-uint rawWidth; // Width of raw buffer
-uint rawHeight; // Height of raw buffer
-float3 neutralPoint; // The camera neutral
-float4 toneMapCoeffs; // Coefficients for a polynomial tonemapping curve
-
-// Interpolate gain map to find per-channel gains at a given pixel
-static float4 getGain(uint x, uint y) {
-    float interpX = (((float) x) / rawWidth) * gainMapWidth;
-    float interpY = (((float) y) / rawHeight) * gainMapHeight;
-    uint gX = (uint) interpX;
-    uint gY = (uint) interpY;
-    uint gXNext = (gX + 1 < gainMapWidth) ? gX + 1 : gX;
-    uint gYNext = (gY + 1 < gainMapHeight) ? gY + 1 : gY;
-
-    float4 tl = *((float4 *) rsGetElementAt(gainMap, gX, gY));
-    float4 tr = *((float4 *) rsGetElementAt(gainMap, gXNext, gY));
-    float4 bl = *((float4 *) rsGetElementAt(gainMap, gX, gYNext));
-    float4 br = *((float4 *) rsGetElementAt(gainMap, gXNext, gYNext));
-
-    float fracX = interpX - (float) gX;
-    float fracY = interpY - (float) gY;
-    float invFracX = 1.f - fracX;
-    float invFracY = 1.f - fracY;
-
-    return tl * invFracX * invFracY + tr * fracX * invFracY +
-            bl * invFracX * fracY + br * fracX * fracY;
-}
-
-// Apply gamma correction using sRGB gamma curve
-static float gammaEncode(float x) {
-    return (x <= 0.0031308f) ? x * 12.92f : 1.055f * pow(x, 0.4166667f) - 0.055f;
-}
-
-// Apply gamma correction to each color channel in RGB pixel
-static float3 gammaCorrectPixel(float3 rgb) {
-    float3 ret;
-    ret.x = gammaEncode(rgb.x);
-    ret.y = gammaEncode(rgb.y);
-    ret.z = gammaEncode(rgb.z);
-    return ret;
-}
-
-// Apply polynomial tonemapping curve to each color channel in RGB pixel.
-// This attempts to apply tonemapping without changing the hue of each pixel,
-// i.e.:
-//
-// For some RGB values:
-// M = max(R, G, B)
-// m = min(R, G, B)
-// m' = mid(R, G, B)
-// chroma = M - m
-// H = m' - m / chroma
-//
-// The relationship H=H' should be preserved, where H and H' are calculated from
-// the RGB and RGB' value at this pixel before and after this tonemapping
-// operation has been applied, respectively.
-static float3 tonemap(float3 rgb) {
-    float3 sorted = clamp(rgb, 0.f, 1.f);
-    float tmp;
-    int permutation = 0;
-
-    // Sort the RGB channels by value
-    if (sorted.z < sorted.y) {
-        tmp = sorted.z;
-        sorted.z = sorted.y;
-        sorted.y = tmp;
-        permutation |= 1;
-    }
-    if (sorted.y < sorted.x) {
-        tmp = sorted.y;
-        sorted.y = sorted.x;
-        sorted.x = tmp;
-        permutation |= 2;
-    }
-    if (sorted.z < sorted.y) {
-        tmp = sorted.z;
-        sorted.z = sorted.y;
-        sorted.y = tmp;
-        permutation |= 4;
-    }
-
-    float2 minmax;
-    minmax.x = sorted.x;
-    minmax.y = sorted.z;
-
-    // Apply tonemapping curve to min, max RGB channel values
-    minmax = native_powr(minmax, 3.f) * toneMapCoeffs.x +
-            native_powr(minmax, 2.f) * toneMapCoeffs.y +
-            minmax * toneMapCoeffs.z + toneMapCoeffs.w;
-
-    // Rescale middle value
-    float newMid;
-    if (sorted.z == sorted.x) {
-        newMid = minmax.y;
-    } else {
-        newMid = minmax.x + ((minmax.y - minmax.x) * (sorted.y - sorted.x) /
-                (sorted.z - sorted.x));
-    }
-
-    float3 finalRGB;
-    switch (permutation) {
-        case 0: // b >= g >= r
-            finalRGB.x = minmax.x;
-            finalRGB.y = newMid;
-            finalRGB.z = minmax.y;
-            break;
-        case 1: // g >= b >= r
-            finalRGB.x = minmax.x;
-            finalRGB.z = newMid;
-            finalRGB.y = minmax.y;
-            break;
-        case 2: // b >= r >= g
-            finalRGB.y = minmax.x;
-            finalRGB.x = newMid;
-            finalRGB.z = minmax.y;
-            break;
-        case 3: // g >= r >= b
-            finalRGB.z = minmax.x;
-            finalRGB.x = newMid;
-            finalRGB.y = minmax.y;
-            break;
-        case 6: // r >= b >= g
-            finalRGB.y = minmax.x;
-            finalRGB.z = newMid;
-            finalRGB.x = minmax.y;
-            break;
-        case 7: // r >= g >= b
-            finalRGB.z = minmax.x;
-            finalRGB.y = newMid;
-            finalRGB.x = minmax.y;
-            break;
-        case 4: // impossible
-        case 5: // impossible
-        default:
-            finalRGB.x = 0.f;
-            finalRGB.y = 0.f;
-            finalRGB.z = 0.f;
-            LOGD("raw_converter.rscript: Logic error in tonemap.", 0);
-            break;
-    }
-    return clamp(finalRGB, 0.f, 1.f);
-}
-
-// Apply a colorspace transform to the intermediate colorspace, apply
-// a tonemapping curve, apply a colorspace transform to a final colorspace,
-// and apply a gamma correction curve.
-static float3 applyColorspace(float3 pRGB) {
-    pRGB.x = clamp(pRGB.x, 0.f, neutralPoint.x);
-    pRGB.y = clamp(pRGB.y, 0.f, neutralPoint.y);
-    pRGB.z = clamp(pRGB.z, 0.f, neutralPoint.z);
-
-    float3 intermediate = rsMatrixMultiply(&sensorToIntermediate, pRGB);
-    intermediate = tonemap(intermediate);
-    return gammaCorrectPixel(clamp(rsMatrixMultiply(&intermediateToSRGB, intermediate), 0.f, 1.f));
-}
-
-// Load a 3x3 patch of pixels into the output.
-static void load3x3(uint x, uint y, rs_allocation buf, /*out*/float* outputArray) {
-    outputArray[0] = *((ushort *) rsGetElementAt(buf, x - 1, y - 1));
-    outputArray[1] = *((ushort *) rsGetElementAt(buf, x, y - 1));
-    outputArray[2] = *((ushort *) rsGetElementAt(buf, x + 1, y - 1));
-    outputArray[3] = *((ushort *) rsGetElementAt(buf, x - 1, y));
-    outputArray[4] = *((ushort *) rsGetElementAt(buf, x, y));
-    outputArray[5] = *((ushort *) rsGetElementAt(buf, x + 1, y));
-    outputArray[6] = *((ushort *) rsGetElementAt(buf, x - 1, y + 1));
-    outputArray[7] = *((ushort *) rsGetElementAt(buf, x, y + 1));
-    outputArray[8] = *((ushort *) rsGetElementAt(buf, x + 1, y + 1));
-}
-
-// Blacklevel subtract, and normalize each pixel in the outputArray, and apply the
-// gain map.
-static void linearizeAndGainmap(uint x, uint y, ushort4 blackLevel, int whiteLevel,
-        uint cfa, /*inout*/float* outputArray) {
-    uint kk = 0;
-    for (uint j = y - 1; j <= y + 1; j++) {
-        for (uint i = x - 1; i <= x + 1; i++) {
-            uint index = (i & 1) | ((j & 1) << 1);  // bits [0,1] are blacklevel offset
-            index |= (cfa << 2);  // bits [2,3] are cfa
-            float bl = 0.f;
-            float g = 1.f;
-            float4 gains = 1.f;
-            if (hasGainMap) {
-                gains = getGain(i, j);
-            }
-            switch (index) {
-                // RGGB
-                case 0:
-                    bl = blackLevel.x;
-                    g = gains.x;
-                    break;
-                case 1:
-                    bl = blackLevel.y;
-                    g = gains.y;
-                    break;
-                case 2:
-                    bl = blackLevel.z;
-                    g = gains.z;
-                    break;
-                case 3:
-                    bl = blackLevel.w;
-                    g = gains.w;
-                    break;
-                // GRBG
-                case 4:
-                    bl = blackLevel.x;
-                    g = gains.y;
-                    break;
-                case 5:
-                    bl = blackLevel.y;
-                    g = gains.x;
-                    break;
-                case 6:
-                    bl = blackLevel.z;
-                    g = gains.w;
-                    break;
-                case 7:
-                    bl = blackLevel.w;
-                    g = gains.z;
-                    break;
-                // GBRG
-                case 8:
-                    bl = blackLevel.x;
-                    g = gains.y;
-                    break;
-                case 9:
-                    bl = blackLevel.y;
-                    g = gains.w;
-                    break;
-                case 10:
-                    bl = blackLevel.z;
-                    g = gains.x;
-                    break;
-                case 11:
-                    bl = blackLevel.w;
-                    g = gains.z;
-                    break;
-                // BGGR
-                case 12:
-                    bl = blackLevel.x;
-                    g = gains.w;
-                    break;
-                case 13:
-                    bl = blackLevel.y;
-                    g = gains.y;
-                    break;
-                case 14:
-                    bl = blackLevel.z;
-                    g = gains.z;
-                    break;
-                case 15:
-                    bl = blackLevel.w;
-                    g = gains.x;
-                    break;
-            }
-            outputArray[kk] = clamp(g * (outputArray[kk] - bl) / (whiteLevel - bl), 0.f, 1.f);
-            kk++;
-        }
-    }
-}
-
-// Apply bilinear-interpolation to demosaic
-static float3 demosaic(uint x, uint y, uint cfa, float* inputArray) {
-    uint index = (x & 1) | ((y & 1) << 1);
-    index |= (cfa << 2);
-    float3 pRGB;
-    switch (index) {
-        case 0:
-        case 5:
-        case 10:
-        case 15:  // Red centered
-                  // B G B
-                  // G R G
-                  // B G B
-            pRGB.x = inputArray[4];
-            pRGB.y = (inputArray[1] + inputArray[3] + inputArray[5] + inputArray[7]) / 4;
-            pRGB.z = (inputArray[0] + inputArray[2] + inputArray[6] + inputArray[8]) / 4;
-            break;
-        case 1:
-        case 4:
-        case 11:
-        case 14: // Green centered w/ horizontally adjacent Red
-                 // G B G
-                 // R G R
-                 // G B G
-            pRGB.x = (inputArray[3] + inputArray[5]) / 2;
-            pRGB.y = inputArray[4];
-            pRGB.z = (inputArray[1] + inputArray[7]) / 2;
-            break;
-        case 2:
-        case 7:
-        case 8:
-        case 13: // Green centered w/ horizontally adjacent Blue
-                 // G R G
-                 // B G B
-                 // G R G
-            pRGB.x = (inputArray[1] + inputArray[7]) / 2;
-            pRGB.y = inputArray[4];
-            pRGB.z = (inputArray[3] + inputArray[5]) / 2;
-            break;
-        case 3:
-        case 6:
-        case 9:
-        case 12: // Blue centered
-                 // R G R
-                 // G B G
-                 // R G R
-            pRGB.x = (inputArray[0] + inputArray[2] + inputArray[6] + inputArray[8]) / 4;
-            pRGB.y = (inputArray[1] + inputArray[3] + inputArray[5] + inputArray[7]) / 4;
-            pRGB.z = inputArray[4];
-            break;
-    }
-
-    return pRGB;
-}
-
-// Full RAW->ARGB bitmap conversion kernel
-uchar4 RS_KERNEL convert_RAW_To_ARGB(uint x, uint y) {
-    float3 pRGB;
-    uint xP = x + offsetX;
-    uint yP = y + offsetY;
-    if (xP == 0) xP = 1;
-    if (yP == 0) yP = 1;
-    if (xP == rawWidth - 1) xP = rawWidth - 2;
-    if (yP == rawHeight - 1) yP = rawHeight  - 2;
-
-    if (isMonochrome) {
-        float pixel = *((ushort *) rsGetElementAt(inputRawBuffer, x, y));
-
-        // Apply linearization and gain map
-        float4 gains = 1.f;
-        if (hasGainMap) {
-            gains = getGain(xP, yP);
-        }
-        float bl = blackLevelPattern.x;
-        float g = gains.x;
-        pixel = clamp(g * (pixel - bl) / (whiteLevel - bl), 0.f, 1.f);
-
-        // Use same Y value for R, G, and B.
-        pRGB.x = pRGB.y = pRGB.z = pixel;
-
-        // apply tonemap and gamma correction
-        pRGB = tonemap(pRGB);
-        pRGB = gammaCorrectPixel(pRGB);
-    } else {
-        float patch[9];
-        // TODO: Once ScriptGroup and RS kernels have been updated to allow for iteration over 3x3 pixel
-        // patches, this can be optimized to avoid re-applying the pre-demosaic steps for each pixel,
-        // potentially achieving a 9x speedup here.
-        load3x3(xP, yP, inputRawBuffer, /*out*/ patch);
-        linearizeAndGainmap(xP, yP, blackLevelPattern, whiteLevel, cfaPattern, /*inout*/patch);
-        pRGB = demosaic(xP, yP, cfaPattern, patch);
-        pRGB = applyColorspace(pRGB);
-    }
-
-    return rsPackColorTo8888(pRGB);
-}
diff --git a/tests/devicepolicy/src/android/devicepolicy/cts/PreferentialNetworkServiceTest.java b/tests/devicepolicy/src/android/devicepolicy/cts/PreferentialNetworkServiceTest.java
index 8d6d421..91a6fd4 100644
--- a/tests/devicepolicy/src/android/devicepolicy/cts/PreferentialNetworkServiceTest.java
+++ b/tests/devicepolicy/src/android/devicepolicy/cts/PreferentialNetworkServiceTest.java
@@ -409,6 +409,30 @@
     }
 
     @CanSetPolicyTest(policy = PreferentialNetworkService.class)
+    public void setPreferentialNetworkServiceConfigs_overlappingUids_throwsException() {
+        UserHandle user = UserHandle.of(sContext.getUserId());
+        final int currentUid = user.getUid(/* appId */ 0);
+        PreferentialNetworkServiceConfig slice1Config =
+                (new PreferentialNetworkServiceConfig.Builder())
+                        .setEnabled(true)
+                        .setNetworkId(PreferentialNetworkServiceConfig.PREFERENTIAL_NETWORK_ID_1)
+                        .setIncludedUids(new int[]{currentUid})
+                        .build();
+        PreferentialNetworkServiceConfig slice2Config =
+                (new PreferentialNetworkServiceConfig.Builder())
+                        .setEnabled(true)
+                        .setNetworkId(PreferentialNetworkServiceConfig.PREFERENTIAL_NETWORK_ID_2)
+                        .setIncludedUids(new int[]{currentUid})
+                        .build();
+        assertThrows(IllegalArgumentException.class,
+                () -> sDeviceState.dpc().devicePolicyManager()
+                        .setPreferentialNetworkServiceConfigs(
+                                List.of(slice1Config, slice2Config)));
+        assertThat(sDeviceState.dpc().devicePolicyManager()
+            .getPreferentialNetworkServiceConfigs().get(0).isEnabled()).isFalse();
+    }
+
+    @CanSetPolicyTest(policy = PreferentialNetworkService.class)
     public void setPreferentialNetworkServiceConfigs_default_isNotSet() {
         sDeviceState.dpc().devicePolicyManager().setPreferentialNetworkServiceConfigs(
                 List.of(PreferentialNetworkServiceConfig.DEFAULT));
diff --git a/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java b/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java
index 329dd76..0d97c15 100644
--- a/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java
+++ b/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java
@@ -31,12 +31,56 @@
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.HashMap;
 
 /**
  * Wrapper class for testing encoders support for profile and level
  */
 public class EncoderProfileLevelTestBase extends CodecEncoderTestBase {
     private static final String LOG_TAG = EncoderProfileLevelTestBase.class.getSimpleName();
+    private static final int[] AVC_LEVELS =
+            new int[]{AVCLevel1, AVCLevel1b, AVCLevel11, AVCLevel12, AVCLevel13, AVCLevel2,
+                    AVCLevel21, AVCLevel22, AVCLevel3, AVCLevel31, AVCLevel32, AVCLevel4,
+                    AVCLevel41, AVCLevel42, AVCLevel5, AVCLevel51, AVCLevel52, AVCLevel6,
+                    AVCLevel61, AVCLevel62};
+    private static final int[] MPEG2_LEVELS =
+            new int[]{MPEG2LevelLL, MPEG2LevelML, MPEG2LevelH14, MPEG2LevelHL, MPEG2LevelHP};
+    private static final int[] MPEG4_LEVELS =
+            new int[]{MPEG4Level0, MPEG4Level0b, MPEG4Level1, MPEG4Level2, MPEG4Level3,
+                    MPEG4Level3b, MPEG4Level4, MPEG4Level4a, MPEG4Level5, MPEG4Level6};
+    private static final int[] VP9_LEVELS =
+            new int[]{VP9Level1, VP9Level11, VP9Level2, VP9Level21, VP9Level3, VP9Level31,
+                    VP9Level4, VP9Level41, VP9Level5, VP9Level51, VP9Level52, VP9Level6,
+                    VP9Level61, VP9Level62};
+    private static final int[] H263_LEVELS =
+            new int[]{H263Level10, H263Level20, H263Level30, H263Level40, H263Level45,
+                    H263Level50, H263Level60, H263Level70};
+    private static final int[] AV1_LEVELS =
+            new int[]{AV1Level2, AV1Level21, AV1Level3, AV1Level31, AV1Level4, AV1Level41,
+                    AV1Level5, AV1Level51, AV1Level52, AV1Level53, AV1Level6, AV1Level61,
+                    AV1Level62, AV1Level63};
+    private static final int[] HEVC_LEVELS =
+            new int[]{HEVCMainTierLevel1, HEVCHighTierLevel1, HEVCMainTierLevel2,
+                    HEVCHighTierLevel2, HEVCMainTierLevel21, HEVCHighTierLevel21,
+                    HEVCMainTierLevel3, HEVCHighTierLevel3, HEVCMainTierLevel31,
+                    HEVCHighTierLevel31, HEVCMainTierLevel4, HEVCHighTierLevel4,
+                    HEVCMainTierLevel41, HEVCHighTierLevel41, HEVCMainTierLevel5,
+                    HEVCHighTierLevel5, HEVCMainTierLevel51, HEVCHighTierLevel51,
+                    HEVCMainTierLevel52, HEVCHighTierLevel52, HEVCMainTierLevel6,
+                    HEVCHighTierLevel6, HEVCHighTierLevel61, HEVCHighTierLevel62,
+                    HEVCMainTierLevel61, HEVCMainTierLevel62};
+
+    public static final HashMap<String, int[]> LEVEL_MAP = new HashMap<>();
+
+    static {
+        LEVEL_MAP.put(MediaFormat.MIMETYPE_VIDEO_AVC, AVC_LEVELS);
+        LEVEL_MAP.put(MediaFormat.MIMETYPE_VIDEO_MPEG2, MPEG2_LEVELS);
+        LEVEL_MAP.put(MediaFormat.MIMETYPE_VIDEO_MPEG4, MPEG4_LEVELS);
+        LEVEL_MAP.put(MediaFormat.MIMETYPE_VIDEO_VP9, VP9_LEVELS);
+        LEVEL_MAP.put(MediaFormat.MIMETYPE_VIDEO_H263, H263_LEVELS);
+        LEVEL_MAP.put(MediaFormat.MIMETYPE_VIDEO_HEVC, HEVC_LEVELS);
+        LEVEL_MAP.put(MediaFormat.MIMETYPE_VIDEO_AV1, AV1_LEVELS);
+    }
 
     private static int divUp(int num, int den) {
         return (num + den - 1) / den;
diff --git a/tests/media/src/android/mediav2/cts/EncoderLevelTest.java b/tests/media/src/android/mediav2/cts/EncoderLevelTest.java
new file mode 100644
index 0000000..e7035aa
--- /dev/null
+++ b/tests/media/src/android/mediav2/cts/EncoderLevelTest.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.cts;
+
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010;
+import static android.mediav2.cts.EncoderInput.getRawResource;
+
+import static org.junit.Assert.assertNotNull;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.mediav2.common.cts.CodecTestBase;
+import android.mediav2.common.cts.EncoderConfigParams;
+import android.mediav2.common.cts.EncoderProfileLevelTestBase;
+import android.mediav2.common.cts.OutputManager;
+
+import com.android.compatibility.common.util.ApiTest;
+
+import org.junit.Assume;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * The purpose of this test is to check video encoders' behaviour towards the level key.
+ * <p>
+ * According to the documentation
+ * <a href="https://developer.android.com/reference/android/media/MediaFormat#KEY_LEVEL
+ * ">KEY_LEVEL</a>, cannot be used to constrain the encoder's output to a maximum encoding level.
+ * Encoders are free to target a different level if the other configured encoding parameters
+ * dictate it. <p>
+ * The test picks an encoding configuration that is supported by the component. The test then
+ * configures KEY_LEVEL to different values. The test expects the codec to not hang or codec
+ * configure to not fail for any level value. The codec is expected to simply choose a supported
+ * level and continue with encode operation.
+ * <p>
+ * At the end of encoding process, the test enforces following checks :-
+ * <ul>
+ *     <li>The minimum PSNR of encoded output is at least the tolerance value.</li>
+ * </ul>
+ */
+@RunWith(Parameterized.class)
+public class EncoderLevelTest extends EncoderProfileLevelTestBase {
+    public EncoderLevelTest(String encoder, String mediaType, EncoderConfigParams[] encCfgParams,
+            @SuppressWarnings("unused") String testLabel, String allTestParams) {
+        super(encoder, mediaType, encCfgParams, allTestParams);
+    }
+
+    private static EncoderConfigParams[] getVideoEncoderCfgParams(String mediaType, int bitRate,
+            int width, int height, int frameRate, int colorFormat, int[] profiles, int level) {
+        ArrayList<EncoderConfigParams> cfgParams = new ArrayList<>();
+        for (int profile : profiles) {
+            cfgParams.add(new EncoderConfigParams.Builder(mediaType)
+                    .setBitRate(bitRate)
+                    .setWidth(width)
+                    .setHeight(height)
+                    .setFrameRate(frameRate)
+                    .setProfile(profile)
+                    .setLevel(level)
+                    .setColorFormat(colorFormat)
+                    .build());
+        }
+        return cfgParams.toArray(new EncoderConfigParams[0]);
+    }
+
+    @Parameterized.Parameters(name = "{index}_{0}_{1}_{3}")
+    public static Collection<Object[]> input() {
+        final boolean isEncoder = true;
+        final boolean needAudio = false;
+        final boolean needVideo = true;
+        final List<Object[]> exhaustiveArgsList = new ArrayList<>(Arrays.asList(new Object[][]{
+                // mediaType, width, height, bit-rate, frame-rate
+                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 352, 288, 512000, 30},
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 176, 144, 64000, 15},
+                {MediaFormat.MIMETYPE_VIDEO_H263, 176, 144, 64000, 15},
+                {MediaFormat.MIMETYPE_VIDEO_VP8, 352, 288, 512000, 30},
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 352, 288, 512000, 30},
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 512, 512, 512000, 30},
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 352, 288, 512000, 30},
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 352, 288, 512000, 30},
+        }));
+        final List<Object[]> argsList = new ArrayList<>();
+        for (Object[] arg : exhaustiveArgsList) {
+            final String mediaType = (String) arg[0];
+            final int width = (int) arg[1];
+            final int height = (int) arg[2];
+            final int br = (int) arg[3];
+            final int fps = (int) arg[4];
+
+            int[] levelList = LEVEL_MAP.get(mediaType);
+            int[] actualLevelList;
+            if (levelList != null) {
+                int levelListLength = levelList.length;
+                actualLevelList = new int[levelList.length + 3];
+                actualLevelList[0] = 0;  // zero (unrecognized by some media types)
+                actualLevelList[1] = -1;  // level is not set in the format
+                actualLevelList[2] = 101;  // unrecognized level
+                System.arraycopy(levelList, 0, actualLevelList, 3, levelListLength);
+            } else {
+                actualLevelList = new int[]{0, -1, 101};
+            }
+            if (PROFILE_SDR_MAP.containsKey(mediaType)) {
+                for (int level : actualLevelList) {
+                    Object[] testArgs = new Object[3];
+                    testArgs[0] = arg[0];
+                    testArgs[1] = getVideoEncoderCfgParams(mediaType, br, width, height, fps,
+                            COLOR_FormatYUV420Flexible,
+                            Objects.requireNonNull(PROFILE_SDR_MAP.get(mediaType)), level);
+                    testArgs[2] = String.format("%dkbps_%dx%d_%dfps_%s_%d-level", br / 1000, width,
+                            height, fps, colorFormatToString(COLOR_FormatYUV420Flexible, -1),
+                            level);
+                    argsList.add(testArgs);
+                }
+            }
+            if (CodecTestBase.IS_AT_LEAST_T && PROFILE_HLG_MAP.containsKey(mediaType)) {
+                for (int level : actualLevelList) {
+                    Object[] testArgs = new Object[3];
+                    testArgs[0] = arg[0];
+                    testArgs[1] = getVideoEncoderCfgParams(mediaType, br, width, height, fps,
+                            COLOR_FormatYUVP010,
+                            Objects.requireNonNull(PROFILE_HLG_MAP.get(mediaType)), level);
+                    testArgs[2] = String.format("%dkbps_%dx%d_%dfps_%s_%d-level", br / 1000, width,
+                            height, fps, colorFormatToString(COLOR_FormatYUVP010, -1), level);
+                    argsList.add(testArgs);
+                }
+            }
+        }
+        return prepareParamList(argsList, isEncoder, needAudio, needVideo, false);
+    }
+
+    /**
+     * Check description of class {@link EncoderLevelTest}
+     */
+    @ApiTest(apis = "android.media.MediaFormat#KEY_LEVEL")
+    @Test(timeout = PER_TEST_TIMEOUT_SMALL_TEST_MS)
+    public void testVideoEncodeLevels() throws IOException, InterruptedException,
+            CloneNotSupportedException {
+        if (mEncCfgParams[0].mInputBitDepth != 8) {
+            Assume.assumeTrue(mCodecName + " doesn't support " + colorFormatToString(
+                            mEncCfgParams[0].mColorFormat, mEncCfgParams[0].mInputBitDepth),
+                    hasSupportForColorFormat(mCodecName, mMediaType,
+                            mEncCfgParams[0].mColorFormat));
+        }
+
+        mActiveRawRes = getRawResource(mEncCfgParams[0]);
+        assertNotNull("no raw resource found for testing config : "
+                + mEncCfgParams[0] + mTestConfig + mTestEnv, mActiveRawRes);
+        setUpSource(mActiveRawRes.mFileName);
+        mSaveToMem = false;
+        mMuxOutput = true;
+        mOutputBuff = new OutputManager();
+        mCodec = MediaCodec.createByCodecName(mCodecName);
+        MediaCodecInfo.CodecCapabilities codecCapabilities =
+                mCodec.getCodecInfo().getCapabilitiesForType(mMediaType);
+        for (EncoderConfigParams cfg : mEncCfgParams) {
+            // check if the format is supported by the component without configuring the level key.
+            MediaFormat formatNotForUse = cfg.getFormat();
+            formatNotForUse.removeKey(MediaFormat.KEY_LEVEL);
+            if (!codecCapabilities.isFormatSupported(formatNotForUse)) {
+                continue;
+            }
+
+            // if the format is supported, then a bad level key must not affect encoding.
+            mActiveEncCfg = cfg;
+            mOutputBuff.reset();
+            configureCodec(cfg.getFormat(), false, true, true);
+            mCodec.start();
+            doWork(5);
+            queueEOS();
+            waitForAllOutputs();
+            mCodec.reset();
+
+            EncoderConfigParams.Builder foreman = cfg.getBuilder().clone().setLevel(
+                    EncoderProfileLevelTest.getMinLevel(cfg.mMediaType, cfg.mWidth,
+                            cfg.mHeight, cfg.mFrameRate, cfg.mBitRate, cfg.mProfile));
+            mActiveEncCfg = foreman.build();
+            validateProfileAndLevel();
+
+            validateEncodedPSNR(getRawResource(cfg), mMediaType, mMuxedOutputFile, true, false,
+                    ACCEPTABLE_WIRELESS_TX_QUALITY);
+            deleteMuxedFile();
+        }
+    }
+}
diff --git a/tests/signature/intent-check/OWNERS b/tests/signature/intent-check/OWNERS
new file mode 100644
index 0000000..a2e8fed
--- /dev/null
+++ b/tests/signature/intent-check/OWNERS
@@ -0,0 +1,2 @@
+# Bug component: 533114
+include platform/frameworks/base:/INTENT_OWNERS
diff --git a/tests/tests/app.usage/src/android/app/usage/cts/UsageStatsTest.java b/tests/tests/app.usage/src/android/app/usage/cts/UsageStatsTest.java
index 1d4aade..5b481e0 100644
--- a/tests/tests/app.usage/src/android/app/usage/cts/UsageStatsTest.java
+++ b/tests/tests/app.usage/src/android/app/usage/cts/UsageStatsTest.java
@@ -19,6 +19,8 @@
 import static android.Manifest.permission.POST_NOTIFICATIONS;
 import static android.Manifest.permission.REVOKE_POST_NOTIFICATIONS_WITHOUT_KILL;
 import static android.Manifest.permission.REVOKE_RUNTIME_PERMISSIONS;
+import static android.app.WindowConfiguration.WINDOWING_MODE_FULLSCREEN;
+import static android.app.WindowConfiguration.WINDOWING_MODE_UNDEFINED;
 import static android.app.usage.UsageStatsManager.STANDBY_BUCKET_FREQUENT;
 import static android.app.usage.UsageStatsManager.STANDBY_BUCKET_NEVER;
 import static android.app.usage.UsageStatsManager.STANDBY_BUCKET_RARE;
@@ -37,6 +39,7 @@
 import android.Manifest;
 import android.app.Activity;
 import android.app.ActivityManager;
+import android.app.ActivityOptions;
 import android.app.AppOpsManager;
 import android.app.KeyguardManager;
 import android.app.Notification;
@@ -291,10 +294,16 @@
     }
 
     private void launchSubActivity(Class<? extends Activity> clazz) {
+        launchSubActivity(clazz, WINDOWING_MODE_UNDEFINED);
+    }
+
+    private void launchSubActivity(Class<? extends Activity> clazz, int windowingMode) {
         final Intent intent = new Intent(Intent.ACTION_MAIN);
         intent.setClassName(mTargetPackage, clazz.getName());
         intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_NEW_TASK);
-        mContext.startActivity(intent);
+        final ActivityOptions options = ActivityOptions.makeBasic();
+        options.setLaunchWindowingMode(windowingMode);
+        mContext.startActivity(intent, options.toBundle());
         mUiDevice.wait(Until.hasObject(By.clazz(clazz)), TIMEOUT);
     }
 
@@ -306,7 +315,13 @@
     }
 
     private void launchTestActivity(String pkgName, String className) {
-        mContext.startActivity(createTestActivityIntent(pkgName, className));
+        launchTestActivity(pkgName, className, WINDOWING_MODE_UNDEFINED);
+    }
+
+    private void launchTestActivity(String pkgName, String className, int windowingMode) {
+        final ActivityOptions options = ActivityOptions.makeBasic();
+        options.setLaunchWindowingMode(windowingMode);
+        mContext.startActivity(createTestActivityIntent(pkgName, className), options.toBundle());
         mUiDevice.wait(Until.hasObject(By.clazz(pkgName, className)), TIMEOUT);
     }
 
@@ -562,37 +577,29 @@
         // Activity will be paused as the activities we launch might be placed on a different
         // TaskDisplayArea. Starting an activity and finishing it immediately will update the last
         // background package of the UsageStatsService regardless of the HOME Activity state.
-        launchTestActivity(TEST_APP_PKG, TEST_APP_CLASS_FINISH_SELF_ON_RESUME);
-        launchSubActivity(Activities.ActivityOne.class);
-        launchSubActivity(Activities.ActivityTwo.class);
+        // To ensure that the test is not affected by the display windowing mode, all activities are
+        // forced to launch in fullscreen mode in this test.
+        launchTestActivity(TEST_APP_PKG, TEST_APP_CLASS_FINISH_SELF_ON_RESUME,
+                WINDOWING_MODE_FULLSCREEN);
+        launchSubActivity(Activities.ActivityOne.class, WINDOWING_MODE_FULLSCREEN);
+        launchSubActivity(Activities.ActivityTwo.class, WINDOWING_MODE_FULLSCREEN);
         endTime = System.currentTimeMillis();
         events = mUsageStatsManager.queryAndAggregateUsageStats(
                 startTime, endTime);
         stats = events.get(mTargetPackage);
         assertEquals(startingCount + 1, stats.getAppLaunchCount());
-        mUiDevice.pressHome();
 
-        launchTestActivity(TEST_APP_PKG, TEST_APP_CLASS_FINISH_SELF_ON_RESUME);
-        launchSubActivity(Activities.ActivityOne.class);
-        launchSubActivity(Activities.ActivityTwo.class);
-        launchSubActivity(Activities.ActivityThree.class);
+        launchTestActivity(TEST_APP_PKG, TEST_APP_CLASS_FINISH_SELF_ON_RESUME,
+                WINDOWING_MODE_FULLSCREEN);
+        launchSubActivity(Activities.ActivityOne.class, WINDOWING_MODE_FULLSCREEN);
+        launchSubActivity(Activities.ActivityTwo.class, WINDOWING_MODE_FULLSCREEN);
+        launchSubActivity(Activities.ActivityThree.class, WINDOWING_MODE_FULLSCREEN);
         endTime = System.currentTimeMillis();
         events = mUsageStatsManager.queryAndAggregateUsageStats(
                 startTime, endTime);
         stats = events.get(mTargetPackage);
 
-        // generally applicable to single screen devices
-        int expectedUsageStatsIncrement = 2;
-        // devices that handle Apps in a multi windowing mode are unlikely to behave as defined by
-        // the single screen expectations; For example, Launcher may always be visible;
-        // consequently, the expected lifecycle will not be triggered, thus resulting in improper
-        // UsageStats values as expected for a single screen environment
-        if (Activities.startedActivities.size() > 0 &&
-                Activities.startedActivities.valueAt(0).isInMultiWindowMode()) {
-            expectedUsageStatsIncrement = 1;
-        }
-
-        assertEquals(startingCount + expectedUsageStatsIncrement, stats.getAppLaunchCount());
+        assertEquals(startingCount + 2, stats.getAppLaunchCount());
     }
 
     @AppModeFull(reason = "No usage events access in instant apps")
diff --git a/tests/tests/database/src/android/database/sqlite/cts/OWNERS b/tests/tests/database/src/android/database/sqlite/cts/OWNERS
new file mode 100644
index 0000000..e6227e0
--- /dev/null
+++ b/tests/tests/database/src/android/database/sqlite/cts/OWNERS
@@ -0,0 +1 @@
+include platform/frameworks/base:/SQLITE_OWNERS
diff --git a/tests/tests/dpi/src/android/dpi/cts/ConfigurationTest.java b/tests/tests/dpi/src/android/dpi/cts/ConfigurationTest.java
index 2333601..b6cd27d 100644
--- a/tests/tests/dpi/src/android/dpi/cts/ConfigurationTest.java
+++ b/tests/tests/dpi/src/android/dpi/cts/ConfigurationTest.java
@@ -17,7 +17,6 @@
 package android.dpi.cts;
 
 import android.content.Context;
-import android.content.pm.PackageManager;
 import android.platform.test.annotations.Presubmit;
 import android.test.AndroidTestCase;
 import android.util.DisplayMetrics;
@@ -89,6 +88,8 @@
         allowedDensities.add(DisplayMetrics.DENSITY_560);
         allowedDensities.add(DisplayMetrics.DENSITY_600);
         allowedDensities.add(DisplayMetrics.DENSITY_XXXHIGH);
+        // Backport of DENSITY_520 from Android 14 to android13-tests-dev
+        allowedDensities.add(520);
         assertTrue("DisplayMetrics.DENSITY_DEVICE_STABLE must be one of the DisplayMetrics.DENSITY_* values: "
                 + allowedDensities, allowedDensities.contains(DisplayMetrics.DENSITY_DEVICE_STABLE));
 
diff --git a/tests/tests/media/audio/src/android/media/audio/cts/VolumeShaperTest.java b/tests/tests/media/audio/src/android/media/audio/cts/VolumeShaperTest.java
index 9bdd2a0..aedfa53 100644
--- a/tests/tests/media/audio/src/android/media/audio/cts/VolumeShaperTest.java
+++ b/tests/tests/media/audio/src/android/media/audio/cts/VolumeShaperTest.java
@@ -265,12 +265,13 @@
 
     // generic player class to simplify testing
     private interface Player extends AutoCloseable {
-        public void start();
-        public void pause();
-        public void stop();
-        @Override public void close();
-        public VolumeShaper createVolumeShaper(VolumeShaper.Configuration configuration);
-        public String name();
+        void start();
+        void pause();
+        void flush();
+        void stop();
+        @Override void close();
+        VolumeShaper createVolumeShaper(VolumeShaper.Configuration configuration);
+        String name();
     }
 
     private static class AudioTrackPlayer implements Player {
@@ -287,6 +288,10 @@
             mTrack.pause();
         }
 
+        @Override public void flush() {
+            mTrack.flush();
+        }
+
         @Override public void stop() {
             mTrack.stop();
         }
@@ -308,26 +313,46 @@
         private final String mName;
     }
 
+    // State management for MediaPlayer
+    private enum State {
+        STOPPED,
+        PAUSED,
+        PLAYING,
+        CLOSED,
+    }
+
     private class MediaPlayerPlayer implements Player {
         public MediaPlayerPlayer(boolean offloaded) {
             mPlayer = createMediaPlayer(offloaded);
             mName = new String("MediaPlayer" + (offloaded ? "Offloaded" : "NonOffloaded"));
+            mState = State.STOPPED;
         }
 
         @Override public void start() {
             mPlayer.start();
+            mState = State.PLAYING;
         }
 
         @Override public void pause() {
             mPlayer.pause();
+            mState = State.PAUSED;
+        }
+
+        @Override public void flush() {
+            if (mState == State.PAUSED) {
+                // On MediaPlayer, seek can be called while playing, too.
+                mPlayer.seekTo(0 /* msec */, MediaPlayer.SEEK_PREVIOUS_SYNC);
+            }
         }
 
         @Override public void stop() {
             mPlayer.stop();
+            mState = State.STOPPED;
         }
 
         @Override public void close() {
             mPlayer.release();
+            mState = State.CLOSED;
         }
 
         @Override
@@ -341,6 +366,7 @@
 
         private final MediaPlayer mPlayer;
         private final String mName;
+        private State mState;
     }
 
     private static final int PLAYER_TYPES = 3;
@@ -1252,7 +1278,8 @@
     @LargeTest
     @Test
     public void testPlayerRunDuringPauseStop() throws Exception {
-        runTestPlayerDuringPauseStop("testPlayerRunDuringPauseStop", false /* useMediaTime */);
+        runTestPlayerDuringPauseStop("testPlayerRunDuringPauseStop",
+                false /* doFlush */, false /* useMediaTime */);
     }
 
     // tests that shaper which is based on media time will freeze
@@ -1260,11 +1287,21 @@
     @LargeTest
     @Test
     public void testPlayerFreezeDuringPauseStop() throws Exception {
-        runTestPlayerDuringPauseStop("testPlayerFreezeDuringPauseStop", true /* useMediaTime */);
+        runTestPlayerDuringPauseStop("testPlayerFreezeDuringPauseStop",
+                false /* doFlush */, true /* useMediaTime */);
+    }
+
+    // tests that a shaper which is based on media time will freeze
+    // in the presence of pause, stop, and flush.
+    @LargeTest
+    @Test
+    public void testPlayerFreezeDuringPauseStopFlush() throws Exception {
+        runTestPlayerDuringPauseStop("testPlayerFreezeDuringPauseStopFlush",
+                true /* doFlush */, true /* useMediaTime */);
     }
 
     private void runTestPlayerDuringPauseStop(
-            String parentTestName, boolean useMediaTime) throws Exception {
+            String parentTestName, boolean doFlush, boolean useMediaTime) throws Exception {
         if (!hasAudioOutput()) {
             Log.w(TAG, "AUDIO_OUTPUT feature not found. This system might not have a valid "
                     + "audio output HAL");
@@ -1283,15 +1320,9 @@
                     // MediaPlayer stop requires prepare before starting.
                     continue;
                 }
-                if (useMediaTime &&  p == PLAYER_TYPE_MEDIA_PLAYER_OFFLOADED) {
-                    continue;  // Offloaded media time not supported.
-                }
-                // For this test, force non offload track for media time,
-                // as media time based offload/direct volumeshaper is not supported yet.
-                // TODO(b/236187574) - remove this requirement.
-                if (useMediaTime &&  p == PLAYER_TYPE_AUDIO_TRACK) {
-                    p = PLAYER_TYPE_AUDIO_TRACK_NON_OFFLOADED;
-                }
+
+                // Note: prior to U, offload and direct tracks used clock time
+                // not media time.
 
                 try (   Player player = createPlayer(p);
                         VolumeShaper volumeShaper = player.createVolumeShaper(config);
@@ -1313,6 +1344,9 @@
                     } else {
                         player.stop();
                     }
+                    if (doFlush) {
+                        player.flush();
+                    }
                     Log.d(TAG, testName + " volume right after " +
                             operation + " is " + volumeShaper.getVolume());
 
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecResourceTest.java b/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecResourceTest.java
index b94f2de8..33b0321 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecResourceTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecResourceTest.java
@@ -181,12 +181,12 @@
             for (MediaCodec mediaCodec : mediaCodecList) {
                 mediaCodec.release();
             }
-            InstrumentationRegistry.getInstrumentation().getUiAutomation()
-                    .dropShellPermissionIdentity();
             destroyHighPriorityProcess();
             destroyLowPriorityProcess();
             // Allow time for the codecs and other resources to be released
             Thread.sleep(500);
+            InstrumentationRegistry.getInstrumentation().getUiAutomation()
+                    .dropShellPermissionIdentity();
         }
     }
 
@@ -268,12 +268,12 @@
             for (MediaCodec mediaCodec : mediaCodecList) {
                 mediaCodec.release();
             }
-            InstrumentationRegistry.getInstrumentation().getUiAutomation()
-                .dropShellPermissionIdentity();
             destroyHighPriorityProcess();
             destroyLowPriorityProcess();
             // Allow time for the codecs and other resources to be released
             Thread.sleep(500);
+            InstrumentationRegistry.getInstrumentation().getUiAutomation()
+                .dropShellPermissionIdentity();
         }
     }
 
diff --git a/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java b/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java
index 4a3f482..83abbe2 100644
--- a/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java
+++ b/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java
@@ -33,6 +33,7 @@
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import android.app.ActivityManager;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.content.res.AssetFileDescriptor;
@@ -3329,12 +3330,19 @@
         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
         mMediaCodecPlayer.startCodec();
 
+        // When video codecs are started, large chunks of contiguous physical memory need to be
+        // allocated, which, on low-RAM devices, can trigger high CPU usage for moving memory
+        // around to create contiguous space for the video decoder. This can cause an increase in
+        // startup time for playback.
+        ActivityManager activityManager = mContext.getSystemService(ActivityManager.class);
+        long firstFrameRenderedTimeoutSeconds = activityManager.isLowRamDevice() ? 3 : 1;
+
         mMediaCodecPlayer.play();
         sleepUntil(() ->
                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
                 && mMediaCodecPlayer.getTimestamp() != null
                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
-                Duration.ofSeconds(1));
+                Duration.ofSeconds(firstFrameRenderedTimeoutSeconds));
         assertNotEquals("onFrameRendered was not called",
                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
diff --git a/tests/tests/media/misc/src/android/media/misc/cts/MediaItemTest.java b/tests/tests/media/misc/src/android/media/misc/cts/MediaItemTest.java
index 747954b..290d3b8 100644
--- a/tests/tests/media/misc/src/android/media/misc/cts/MediaItemTest.java
+++ b/tests/tests/media/misc/src/android/media/misc/cts/MediaItemTest.java
@@ -15,23 +15,34 @@
  */
 package android.media.misc.cts;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 import android.media.MediaDescription;
 import android.media.browse.MediaBrowser.MediaItem;
 import android.media.cts.NonMediaMainlineTest;
 import android.os.Parcel;
-import android.test.AndroidTestCase;
 import android.text.TextUtils;
 
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
 /**
  * Test {@link android.media.browse.MediaBrowser.MediaItem}.
  */
 @NonMediaMainlineTest
-public class MediaItemTest extends AndroidTestCase {
+@RunWith(AndroidJUnit4.class)
+public class MediaItemTest {
     private static final String DESCRIPTION = "test_description";
     private static final String MEDIA_ID = "test_media_id";
     private static final String TITLE = "test_title";
     private static final String SUBTITLE = "test_subtitle";
 
+    @Test
     public void testBrowsableMediaItem() {
         MediaDescription description = new MediaDescription.Builder()
                 .setDescription(DESCRIPTION).setMediaId(MEDIA_ID)
@@ -58,6 +69,7 @@
         p.recycle();
     }
 
+    @Test
     public void testPlayableMediaItem() {
         MediaDescription description = new MediaDescription.Builder()
                 .setDescription(DESCRIPTION).setMediaId(MEDIA_ID)
diff --git a/tests/tests/media/misc/src/android/media/misc/cts/MediaMetadataRetrieverTest.java b/tests/tests/media/misc/src/android/media/misc/cts/MediaMetadataRetrieverTest.java
index 780f18d..dddcbf2 100644
--- a/tests/tests/media/misc/src/android/media/misc/cts/MediaMetadataRetrieverTest.java
+++ b/tests/tests/media/misc/src/android/media/misc/cts/MediaMetadataRetrieverTest.java
@@ -21,6 +21,13 @@
 import static android.media.MediaMetadataRetriever.OPTION_NEXT_SYNC;
 import static android.media.MediaMetadataRetriever.OPTION_PREVIOUS_SYNC;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.content.Context;
 import android.content.pm.PackageManager;
 import android.content.res.AssetFileDescriptor;
 import android.graphics.Bitmap;
@@ -35,28 +42,33 @@
 import android.media.cts.CodecUtils;
 import android.media.cts.Preconditions;
 import android.media.cts.TestMediaDataSource;
-import android.media.cts.TestUtils;
-import android.os.ParcelFileDescriptor;
 import android.net.Uri;
 import android.os.Build;
 import android.os.Environment;
+import android.os.ParcelFileDescriptor;
 import android.platform.test.annotations.AppModeFull;
 import android.platform.test.annotations.Presubmit;
 import android.platform.test.annotations.RequiresDevice;
-import android.test.AndroidTestCase;
 import android.util.Log;
 import android.view.Display;
 
+import androidx.test.ext.junit.runners.AndroidJUnit4;
 import androidx.test.filters.SmallTest;
+import androidx.test.platform.app.InstrumentationRegistry;
 
 import com.android.compatibility.common.util.ApiLevelUtil;
 import com.android.compatibility.common.util.MediaUtils;
 
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
 import java.io.Closeable;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -68,7 +80,8 @@
 @SmallTest
 @RequiresDevice
 @AppModeFull(reason = "No interaction with system server")
-public class MediaMetadataRetrieverTest extends AndroidTestCase {
+@RunWith(AndroidJUnit4.class)
+public class MediaMetadataRetrieverTest {
     private static final String TAG = "MediaMetadataRetrieverTest";
     private static final boolean SAVE_BITMAP_OUTPUT = false;
     private static final String TEST_MEDIA_FILE = "retriever_test.3gp";
@@ -93,16 +106,18 @@
     private boolean mIsAtLeastR = ApiLevelUtil.isAtLeast(Build.VERSION_CODES.R);
     private boolean mIsAtLeastS = ApiLevelUtil.isAtLeast(Build.VERSION_CODES.S);
 
-    @Override
-    protected void setUp() throws Exception {
-        super.setUp();
+    private Context getContext() {
+        return InstrumentationRegistry.getInstrumentation().getContext();
+    }
+
+    @Before
+    public void setUp() throws Exception {
         mRetriever = new MediaMetadataRetriever();
         mPackageManager = getContext().getPackageManager();
     }
 
-    @Override
-    protected void tearDown() throws Exception {
-        super.tearDown();
+    @After
+    public void tearDown() throws Exception {
         mRetriever.release();
         File file = new File(Environment.getExternalStorageDirectory(), TEST_MEDIA_FILE);
         if (file.exists()) {
@@ -157,28 +172,44 @@
         return ds;
     }
 
+    private static class WrappedDataSource extends MediaDataSource {
+        private final MediaDataSource mBackingMediaDataSource;
+        private boolean mFinished = false;
+
+        WrappedDataSource(MediaDataSource backingMediaDataSource) {
+            mBackingMediaDataSource = backingMediaDataSource;
+        }
+
+        @Override
+        public int readAt(long position, byte[] buffer, int offset, int size)
+                throws IOException {
+            return mBackingMediaDataSource.readAt(position, buffer, offset, size);
+        }
+
+        @Override
+        public long getSize() throws IOException {
+            return mBackingMediaDataSource.getSize();
+        }
+
+        @Override
+        public void close() throws IOException {
+            mBackingMediaDataSource.close();
+            if (!mFinished) {
+                throw new IOException();
+            }
+        }
+
+        public void finish() {
+            mFinished = true;
+        }
+    }
+
+    @Test
     public void testExceptionWhileClosingMediaDataSource() throws IOException {
         MediaDataSource backingMediaDataSource =
                 TestMediaDataSource.fromAssetFd(
                         getAssetFileDescriptorFor("audio_with_metadata.mp3"));
-        MediaDataSource mediaDataSource = new MediaDataSource() {
-            @Override
-            public int readAt(long position, byte[] buffer, int offset, int size)
-                    throws IOException {
-                return backingMediaDataSource.readAt(position, buffer, offset, size);
-            }
-
-            @Override
-            public long getSize() throws IOException {
-                return backingMediaDataSource.getSize();
-            }
-
-            @Override
-            public void close() throws IOException {
-                backingMediaDataSource.close();
-                throw new IOException();
-            }
-        };
+        WrappedDataSource mediaDataSource = new WrappedDataSource(backingMediaDataSource);
         mRetriever.setDataSource(mediaDataSource);
         try {
             mRetriever.release();
@@ -186,8 +217,13 @@
         } catch (IOException e) {
             // Expected.
         }
+        // MediaDataSource implements Closeable interface, so the finalizer will
+        // try to close the object. If close() always throws an exception, the
+        // finalizer will bring down the test.
+        mediaDataSource.finish();
     }
 
+    @Test
     public void testAudioMetadata() {
         setDataSourceCallback("audio_with_metadata.mp3");
 
@@ -221,6 +257,7 @@
             mRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_MIMETYPE));
     }
 
+    @Test
     public void test3gppMetadata() {
         setDataSourceCallback("testvideo.3gp");
 
@@ -317,6 +354,7 @@
                 mRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_WRITER));
     }
 
+    @Test
     public void testID3v2Metadata() {
         setDataSourceFd(
                 "video_480x360_mp4_h264_500kbps_25fps_aac_stereo_128kbps_44100hz_id3v2.mp4");
@@ -414,6 +452,7 @@
                 mRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_WRITER));
     }
 
+    @Test
     public void testID3v2Unsynchronization() {
         setDataSourceFd("testmp3_4.mp3");
         assertEquals("Mime type was other than expected",
@@ -421,6 +460,7 @@
                 mRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_MIMETYPE));
     }
 
+    @Test
     public void testID3v240ExtHeader() {
         setDataSourceFd("sinesweepid3v24ext.mp3");
         assertEquals("Mime type was other than expected",
@@ -433,6 +473,7 @@
                 mRetriever.getEmbeddedPicture());
     }
 
+    @Test
     public void testID3v230ExtHeader() {
         setDataSourceFd("sinesweepid3v23ext.mp3");
         assertEquals("Mime type was other than expected",
@@ -445,6 +486,7 @@
                 mRetriever.getEmbeddedPicture());
     }
 
+    @Test
     public void testID3v230ExtHeaderBigEndian() {
         setDataSourceFd("sinesweepid3v23extbe.mp3");
         assertEquals("Mime type was other than expected",
@@ -457,6 +499,7 @@
                 mRetriever.getEmbeddedPicture());
     }
 
+    @Test
     public void testMp4AlbumArt() {
         setDataSourceFd("swirl_128x128_h264_albumart.mp4");
         assertEquals("Mime type was other than expected",
@@ -466,6 +509,7 @@
                 mRetriever.getEmbeddedPicture());
     }
 
+    @Test
     public void testGenreParsing() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
         Object [][] genres = {
@@ -490,6 +534,7 @@
         }
     }
 
+    @Test
     public void testBitsPerSampleAndSampleRate() {
         setDataSourceFd("testwav_16bit_44100hz.wav");
 
@@ -503,17 +548,20 @@
 
     }
 
+    @Test
     public void testGetEmbeddedPicture() {
         setDataSourceFd("largealbumart.mp3");
 
         assertNotNull("couldn't retrieve album art", mRetriever.getEmbeddedPicture());
     }
 
+    @Test
     public void testAlbumArtInOgg() throws Exception {
         setDataSourceFd("sinesweepoggalbumart.ogg");
         assertNotNull("couldn't retrieve album art from ogg", mRetriever.getEmbeddedPicture());
     }
 
+    @Test
     public void testSetDataSourcePath() {
         copyMediaFile();
         File file = new File(Environment.getExternalStorageDirectory(), TEST_MEDIA_FILE);
@@ -524,6 +572,7 @@
         }
     }
 
+    @Test
     public void testSetDataSourceUri() {
         copyMediaFile();
         File file = new File(Environment.getExternalStorageDirectory(), TEST_MEDIA_FILE);
@@ -535,6 +584,7 @@
         }
     }
 
+    @Test
     public void testSetDataSourceNullPath() {
         try {
             mRetriever.setDataSource((String)null);
@@ -544,6 +594,7 @@
         }
     }
 
+    @Test
     public void testSetDataSourceNullUri() {
         try {
             mRetriever.setDataSource(getContext(), (Uri)null);
@@ -553,6 +604,7 @@
         }
     }
 
+    @Test
     public void testNullMediaDataSourceIsRejected() {
         try {
             mRetriever.setDataSource((MediaDataSource)null);
@@ -562,6 +614,7 @@
         }
     }
 
+    @Test
     public void testMediaDataSourceIsClosedOnRelease() throws Exception {
         TestMediaDataSource dataSource = setDataSourceCallback("testvideo.3gp");
         mRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_TITLE);
@@ -569,6 +622,7 @@
         assertTrue(dataSource.isClosed());
     }
 
+    @Test
     public void testRetrieveFailsIfMediaDataSourceThrows() throws Exception {
         TestMediaDataSource ds = getFaultyDataSource("testvideo.3gp", true /* throwing */);
         try {
@@ -579,6 +633,7 @@
         }
     }
 
+    @Test
     public void testRetrieveFailsIfMediaDataSourceReturnsAnError() throws Exception {
         TestMediaDataSource ds = getFaultyDataSource("testvideo.3gp", false /* throwing */);
         try {
@@ -637,6 +692,7 @@
         }
     }
 
+    @Test
     public void testThumbnailH264() {
         testThumbnail(
                 "bbb_s4_1280x720_mp4_h264_mp31_8mbps_30fps_aac_he_mono_40kbps_44100hz.mp4",
@@ -644,10 +700,12 @@
                 720);
     }
 
+    @Test
     public void testThumbnailH263() {
         testThumbnail("video_176x144_3gp_h263_56kbps_12fps_aac_mono_24kbps_11025hz.3gp", 176, 144);
     }
 
+    @Test
     public void testThumbnailMPEG4() {
         testThumbnail(
                 "video_1280x720_mp4_mpeg4_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
@@ -655,6 +713,7 @@
                 720);
     }
 
+    @Test
     public void testThumbnailVP8() {
         testThumbnail(
                 "bbb_s1_640x360_webm_vp8_2mbps_30fps_vorbis_5ch_320kbps_48000hz.webm",
@@ -662,6 +721,7 @@
                 360);
     }
 
+    @Test
     public void testThumbnailVP9() {
         testThumbnail(
                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
@@ -669,6 +729,7 @@
                 360);
     }
 
+    @Test
     public void testThumbnailHEVC() {
         testThumbnail(
                 "bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4",
@@ -676,10 +737,11 @@
                 480);
     }
 
+    @Test
     public void testThumbnailVP9Hdr() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
 
-        DisplayManager displayManager = mContext.getSystemService(DisplayManager.class);
+        DisplayManager displayManager = getContext().getSystemService(DisplayManager.class);
         int numberOfSupportedHdrTypes =
             displayManager.getDisplay(Display.DEFAULT_DISPLAY).getHdrCapabilities()
                 .getSupportedHdrTypes().length;
@@ -692,10 +754,11 @@
         testThumbnail("video_1280x720_vp9_hdr_static_3mbps.mkv", 1280, 720);
     }
 
+    @Test
     public void testThumbnailAV1Hdr() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
 
-        DisplayManager displayManager = mContext.getSystemService(DisplayManager.class);
+        DisplayManager displayManager = getContext().getSystemService(DisplayManager.class);
         int numberOfSupportedHdrTypes =
             displayManager.getDisplay(Display.DEFAULT_DISPLAY).getHdrCapabilities()
                 .getSupportedHdrTypes().length;
@@ -708,6 +771,7 @@
         testThumbnail("video_1280x720_av1_hdr_static_3mbps.webm", 1280, 720);
     }
 
+    @Test
     public void testThumbnailHDR10() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
 
@@ -735,6 +799,7 @@
         verifyVideoFrameRotation(thumbnail, targetRotation);
     }
 
+    @Test
     public void testThumbnailWithRotation() {
         String[] res = {"video_h264_mpeg4_rotate_0.mp4", "video_h264_mpeg4_rotate_90.mp4",
                 "video_h264_mpeg4_rotate_180.mp4", "video_h264_mpeg4_rotate_270.mp4"};
@@ -759,30 +824,35 @@
      *     4) frame time is shortly before a sync frame
      *     5) frame time is shortly after a sync frame
      */
+    @Test
     public void testGetFrameAtTimePreviousSync() {
         int[][] testCases = {
                 { 2066666, 60 }, { 2500000, 60 }, { 2600000, 60 }, { 3000000, 60 }, { 3200000, 90}};
         testGetFrameAtTime(OPTION_PREVIOUS_SYNC, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimeNextSync() {
         int[][] testCases = {
                 { 2066666, 60 }, { 2500000, 90 }, { 2600000, 90 }, { 3000000, 90 }, { 3200000, 120}};
         testGetFrameAtTime(OPTION_NEXT_SYNC, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimeClosestSync() {
         int[][] testCases = {
                 { 2066666, 60 }, { 2500000, 60 }, { 2600000, 90 }, { 3000000, 90 }, { 3200000, 90}};
         testGetFrameAtTime(OPTION_CLOSEST_SYNC, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimeClosest() {
         int[][] testCases = {
                 { 2066666, 60 }, { 2500001, 73 }, { 2599999, 76 }, { 3016000, 88 }, { 3184000, 94}};
         testGetFrameAtTime(OPTION_CLOSEST, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimePreviousSyncEditList() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
         int[][] testCases = {
@@ -790,6 +860,7 @@
         testGetFrameAtTimeEditList(OPTION_PREVIOUS_SYNC, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimeNextSyncEditList() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
         int[][] testCases = {
@@ -797,6 +868,7 @@
         testGetFrameAtTimeEditList(OPTION_NEXT_SYNC, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimeClosestSyncEditList() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
         int[][] testCases = {
@@ -804,6 +876,7 @@
         testGetFrameAtTimeEditList(OPTION_CLOSEST_SYNC, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimeClosestEditList() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
         int[][] testCases = {
@@ -811,6 +884,7 @@
         testGetFrameAtTimeEditList(OPTION_CLOSEST, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimePreviousSyncEmptyNormalEditList() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
         int[][] testCases = {
@@ -818,6 +892,7 @@
         testGetFrameAtTimeEmptyNormalEditList(OPTION_PREVIOUS_SYNC, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimeNextSyncEmptyNormalEditList() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
         int[][] testCases = {{ 2000000, 60 }, { 2133000, 60 }, { 2566334, 90 }, { 3100000, 90 },
@@ -825,6 +900,7 @@
         testGetFrameAtTimeEmptyNormalEditList(OPTION_NEXT_SYNC, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimeClosestSyncEmptyNormalEditList() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
         int[][] testCases = {
@@ -832,6 +908,7 @@
         testGetFrameAtTimeEmptyNormalEditList(OPTION_CLOSEST_SYNC, testCases);
     }
 
+    @Test
     public void testGetFrameAtTimeClosestEmptyNormalEditList() {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
         int[][] testCases = {
@@ -879,6 +956,7 @@
         });
     }
 
+    @Test
     public void testGetFrameAtIndex() {
         int[][] testCases = { { 60, 60 }, { 73, 73 }, { 76, 76 }, { 88, 88 }, { 94, 94} };
 
@@ -906,6 +984,7 @@
         });
     }
 
+    @Test
     public void testGetFramesAtIndex() {
         int[][] testCases = { { 27, 27 }, { 28, 28 }, { 29, 29 }, { 30, 30 }, { 31, 31} };
 
@@ -991,6 +1070,7 @@
     /**
      * The following tests verifies MediaMetadataRetriever.getScaledFrameAtTime behavior.
      */
+    @Test
     public void testGetScaledFrameAtTimeWithInvalidResolutions() {
         String[] resources = {"binary_counter_320x240_30fps_600frames.mp4",
                 "binary_counter_320x240_30fps_600frames_editlist.mp4",
@@ -1060,6 +1140,7 @@
         assertEquals("Bitmap height is wrong", expectedHeight, bitmap.getHeight());
     }
 
+    @Test
     public void testGetScaledFrameAtTime() {
         String res = "binary_counter_320x240_30fps_600frames.mp4";
         setDataSourceFd(res);
@@ -1090,6 +1171,7 @@
         testGetScaledFrameAtTime(330, 240, 330, 110, null);
     }
 
+    @Test
     public void testGetImageAtIndex() throws Exception {
         if (!MediaUtils.hasDecoder(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
             MediaUtils.skipTest("no video decoders for HEVC");
@@ -1100,6 +1182,7 @@
                 4 /*imageCount*/, 3 /*primary*/, true /*useGrid*/, true /*checkColor*/);
     }
 
+    @Test
     public void testGetImageAtIndexAvif() throws Exception {
         if (!MediaUtils.check(mIsAtLeastS, "test needs Android 12")) return;
         if (!MediaUtils.canDecodeVideo("AV1", 1920, 1080, 30)) {
@@ -1110,6 +1193,7 @@
                 1 /*imageCount*/, 0 /*primary*/, false /*useGrid*/, true /*checkColor*/);
     }
 
+    @Test
     public void testGetImageAtIndexAvifGrid() throws Exception {
         if (!MediaUtils.check(mIsAtLeastS, "test needs Android 12")) return;
         if (!MediaUtils.canDecodeVideo("AV1", 512, 512, 30)) {
diff --git a/tests/tests/media/misc/src/android/media/misc/cts/ResourceManagerRecorderActivity.java b/tests/tests/media/misc/src/android/media/misc/cts/ResourceManagerRecorderActivity.java
index dba4da8..9335b58 100644
--- a/tests/tests/media/misc/src/android/media/misc/cts/ResourceManagerRecorderActivity.java
+++ b/tests/tests/media/misc/src/android/media/misc/cts/ResourceManagerRecorderActivity.java
@@ -75,7 +75,7 @@
         if (extras != null) {
             mHighResolution = extras.getBoolean("high-resolution", mHighResolution);
             mMime = extras.getString("mime", mMime);
-            if (mMime == MediaFormat.MIMETYPE_VIDEO_HEVC) {
+            if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
                 mVideoEncoderType = MediaRecorder.VideoEncoder.HEVC;
             }
         }
diff --git a/tests/tests/net/native/Android.bp b/tests/tests/net/native/Android.bp
index 072e380..e52cb7c 100644
--- a/tests/tests/net/native/Android.bp
+++ b/tests/tests/net/native/Android.bp
@@ -46,7 +46,7 @@
 
     shared_libs: [
         "libandroid",
-        "libbinder",
+        "libbinder_ndk",
         "liblog",
         "libutils",
     ],
diff --git a/tests/tests/net/native/src/TagSocketTest.cpp b/tests/tests/net/native/src/TagSocketTest.cpp
index 253dc5a..c1533bc 100644
--- a/tests/tests/net/native/src/TagSocketTest.cpp
+++ b/tests/tests/net/native/src/TagSocketTest.cpp
@@ -15,15 +15,22 @@
  *
  */
 
+#include <thread>
+
+#include <android-base/file.h>
 #include <android-base/format.h>
+#include <android/binder_auto_utils.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+#include <android/binder_status.h>
 #include <android/multinetwork.h>
-#include <binder/IServiceManager.h>
+#include <android-base/unique_fd.h>
 #include <bpf/BpfUtils.h>
 #include <gtest/gtest.h>
 #include <nettestutils/DumpService.h>
 
-using android::IBinder;
-using android::IServiceManager;
+using android::base::unique_fd;
+using android::base::ReadFdToString;
 using android::bpf::getSocketCookie;
 using android::bpf::NONEXISTENT_COOKIE;
 using android::sp;
@@ -33,14 +40,11 @@
 class TagSocketTest : public ::testing::Test {
  public:
   TagSocketTest() {
-    sp<IServiceManager> sm = android::defaultServiceManager();
-    mBinder = sm->getService(String16("connectivity"));
+    mBinder = ndk::SpAIBinder(AServiceManager_waitForService("connectivity"));
   }
 
-  void SetUp() override { ASSERT_NE(nullptr, mBinder.get()); }
-
  protected:
-  sp<IBinder> mBinder;
+  ndk::SpAIBinder mBinder;
 };
 
 namespace {
@@ -48,15 +52,45 @@
 constexpr uid_t TEST_UID = 10086;
 constexpr uint32_t TEST_TAG = 42;
 
-[[maybe_unused]] void dumpBpfMaps(const sp<IBinder>& binder,
+android::status_t dumpService(const ndk::SpAIBinder& binder,
+                              const char** args,
+                              uint32_t num_args,
+                              std::vector<std::string>& outputLines) {
+  unique_fd localFd, remoteFd;
+  bool success = Pipe(&localFd, &remoteFd);
+  EXPECT_TRUE(success) << "Failed to open pipe for dumping: " << strerror(errno);
+  if (!success) return STATUS_UNKNOWN_ERROR;
+
+  // dump() blocks until another thread has consumed all its output.
+  std::thread dumpThread = std::thread([binder, remoteFd{std::move(remoteFd)}, args, num_args]() {
+    EXPECT_EQ(android::OK, AIBinder_dump(binder.get(), remoteFd, args, num_args));
+  });
+
+  std::string dumpContent;
+
+  EXPECT_TRUE(ReadFdToString(localFd.get(), &dumpContent))
+      << "Error during dump: " << strerror(errno);
+  dumpThread.join();
+
+  std::stringstream dumpStream(dumpContent);
+  std::string line;
+  while (std::getline(dumpStream, line)) {
+    outputLines.push_back(std::move(line));
+  }
+
+  return android::OK;
+}
+
+[[maybe_unused]] void dumpBpfMaps(const ndk::SpAIBinder& binder,
                                   std::vector<std::string>& output) {
   Vector<String16> vec;
-  android::status_t ret = dumpService(binder, {"trafficcontroller"}, output);
+  const char* arg = "trafficcontroller";
+  android::status_t ret = dumpService(binder, &arg, 1, output);
   ASSERT_EQ(android::OK, ret)
       << "Error dumping service: " << android::statusToString(ret);
 }
 
-[[maybe_unused]] bool socketIsTagged(const sp<IBinder>& binder, uint64_t cookie,
+[[maybe_unused]] bool socketIsTagged(const ndk::SpAIBinder& binder, uint64_t cookie,
                                      uid_t uid, uint32_t tag) {
   std::string match =
       fmt::format("cookie={} tag={:#x} uid={}", cookie, tag, uid);
@@ -68,7 +102,7 @@
   return false;
 }
 
-[[maybe_unused]] bool socketIsNotTagged(const sp<IBinder>& binder,
+[[maybe_unused]] bool socketIsNotTagged(const ndk::SpAIBinder& binder,
                                         uint64_t cookie) {
   std::string match = fmt::format("cookie={}", cookie);
   std::vector<std::string> lines = {};
@@ -79,7 +113,7 @@
   return true;
 }
 
-bool waitSocketIsNotTagged(const sp<IBinder>& binder, uint64_t cookie,
+bool waitSocketIsNotTagged(const ndk::SpAIBinder& binder, uint64_t cookie,
                            int maxTries) {
     for (int i = 0; i < maxTries; ++i) {
         if (socketIsNotTagged(binder, cookie)) return true;
diff --git a/tests/tests/permission2/res/raw/android_manifest.xml b/tests/tests/permission2/res/raw/android_manifest.xml
index 7399658..f6658ed 100644
--- a/tests/tests/permission2/res/raw/android_manifest.xml
+++ b/tests/tests/permission2/res/raw/android_manifest.xml
@@ -4681,6 +4681,12 @@
     <permission android:name="android.permission.GRANT_RUNTIME_PERMISSIONS"
         android:protectionLevel="signature|installer|verifier" />
 
+    <!-- Allows an application to launch the settings page which manages various
+         permissions.
+         @hide -->
+    <permission android:name="android.permission.LAUNCH_PERMISSION_SETTINGS"
+                android:protectionLevel="signature|privileged" />
+
     <!-- @SystemApi Allows an app that has this permission and the permissions to install packages
          to request certain runtime permissions to be granted at installation.
          @hide -->
diff --git a/tests/tests/permission2/src/android/permission2/cts/NoReceiveSmsPermissionTest.java b/tests/tests/permission2/src/android/permission2/cts/NoReceiveSmsPermissionTest.java
index e95f53a..b6d6514 100644
--- a/tests/tests/permission2/src/android/permission2/cts/NoReceiveSmsPermissionTest.java
+++ b/tests/tests/permission2/src/android/permission2/cts/NoReceiveSmsPermissionTest.java
@@ -26,6 +26,7 @@
 import android.platform.test.annotations.AppModeFull;
 import android.platform.test.annotations.SystemUserOnly;
 import android.telephony.SmsManager;
+import android.telephony.SubscriptionInfo;
 import android.telephony.SubscriptionManager;
 import android.test.AndroidTestCase;
 import android.text.TextUtils;
@@ -142,12 +143,19 @@
                  getContext().getSystemService(Context.TELEPHONY_SUBSCRIPTION_SERVICE);
         int subscriptionId = subscription.getActiveDataSubscriptionId();
 
+        assertFalse("[RERUN] No active telephony subscription. Check there is one enabled.",
+                subscriptionId == SubscriptionManager.INVALID_SUBSCRIPTION_ID);
+
         // get current phone number
         String currentNumber = subscription.getPhoneNumber(subscriptionId);
 
         // fallback to getActiveSubscriptionInfo if number is empty
         if (TextUtils.isEmpty(currentNumber)) {
-            currentNumber = subscription.getActiveSubscriptionInfo(subscriptionId).getNumber();
+            SubscriptionInfo subInfo = subscription.getActiveSubscriptionInfo(subscriptionId);
+
+            assertTrue("[RERUN] No info for the active telephony subscription.",
+                    subInfo != null);
+            currentNumber = subInfo.getNumber();
         }
 
         assertFalse("[RERUN] SIM card does not provide phone number. Use a suitable SIM Card.",
diff --git a/tests/tests/permission2/src/android/permission2/cts/PermissionPolicyTest.java b/tests/tests/permission2/src/android/permission2/cts/PermissionPolicyTest.java
index d8ea3f7..46dd1cd 100644
--- a/tests/tests/permission2/src/android/permission2/cts/PermissionPolicyTest.java
+++ b/tests/tests/permission2/src/android/permission2/cts/PermissionPolicyTest.java
@@ -78,6 +78,9 @@
     private static final String BIND_QUICK_SETTINGS_TILE =
             "android.permission.BIND_QUICK_SETTINGS_TILE";
 
+    private static final String LAUNCH_PERMISSION_SETTINGS =
+            "android.permission.LAUNCH_PERMISSION_SETTINGS";
+
     private static final String LOG_TAG = "PermissionProtectionTest";
 
     private static final String PLATFORM_PACKAGE_NAME = "android";
@@ -513,6 +516,8 @@
                 return parseDate(SECURITY_PATCH).before(MANAGE_COMPANION_DEVICES_PATCH_DATE);
             case SET_UNRESTRICTED_GESTURE_EXCLUSION:
                 return true;
+            case LAUNCH_PERMISSION_SETTINGS:
+                return true;
             default:
                 return false;
         }
diff --git a/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ForceDarkTests.java b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ForceDarkTests.java
index 0b29c37..38861b6 100644
--- a/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ForceDarkTests.java
+++ b/tests/tests/uirendering/src/android/uirendering/cts/testclasses/ForceDarkTests.java
@@ -16,6 +16,7 @@
 
 package android.uirendering.cts.testclasses;
 
+import android.Manifest;
 import android.app.UiModeManager;
 import android.content.Context;
 import android.graphics.Color;
@@ -51,14 +52,19 @@
                 InstrumentationRegistry.getContext().getSystemService(Context.UI_MODE_SERVICE);
         sPreviousUiMode = uiManager.getNightMode();
         if (sPreviousUiMode != UiModeManager.MODE_NIGHT_YES) {
-            SystemUtil.runShellCommand("service call uimode 4 i32 2");
+            SystemUtil.runWithShellPermissionIdentity(
+                    () -> uiManager.setNightMode(UiModeManager.MODE_NIGHT_YES),
+                    Manifest.permission.MODIFY_DAY_NIGHT_MODE);
         }
     }
 
     @AfterClass
     public static void restoreForceDarkSetting() {
+        UiModeManager uiManager = (UiModeManager)
+                InstrumentationRegistry.getContext().getSystemService(Context.UI_MODE_SERVICE);
         if (sPreviousUiMode != UiModeManager.MODE_NIGHT_YES) {
-            SystemUtil.runShellCommand("service call uimode 4 i32 " + sPreviousUiMode);
+            SystemUtil.runWithShellPermissionIdentity(() -> uiManager.setNightMode(sPreviousUiMode),
+                    Manifest.permission.MODIFY_DAY_NIGHT_MODE);
         }
     }
 
diff --git a/tests/tests/usb/OWNERS b/tests/tests/usb/OWNERS
index ad5a496..1fe0677 100644
--- a/tests/tests/usb/OWNERS
+++ b/tests/tests/usb/OWNERS
@@ -1,4 +1,8 @@
 # Bug component: 175220
+aprasath@google.com
+kumarashishg@google.com
+sarup@google.com
+anothermark@google.com
 badhri@google.com
 elaurent@google.com
 albertccwang@google.com
diff --git a/tests/tests/view/AndroidManifest.xml b/tests/tests/view/AndroidManifest.xml
index ba1205e..1317289 100644
--- a/tests/tests/view/AndroidManifest.xml
+++ b/tests/tests/view/AndroidManifest.xml
@@ -306,7 +306,7 @@
                   android:label="HandleConfigurationActivity"
                   android:rotationAnimation="jumpcut"
                   android:configChanges="orientation|screenSize|screenLayout|smallestScreenSize"
-                  android:theme="@android:style/Theme.Material.Dialog.NoActionBar"
+                  android:theme="@android:style/Theme.Material.NoActionBar"
                   android:exported="true">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN"/>
diff --git a/tests/tests/wrap/OWNERS b/tests/tests/wrap/OWNERS
index 6ea7640..72490c9 100644
--- a/tests/tests/wrap/OWNERS
+++ b/tests/tests/wrap/OWNERS
@@ -1,2 +1,4 @@
 # Bug component: 86431
 yabinc@google.com
+cferris@google.com
+danalbert@google.com
\ No newline at end of file
diff --git a/tests/tests/wrap/hwasan/Android.bp b/tests/tests/wrap/hwasan/Android.bp
new file mode 100644
index 0000000..3609137
--- /dev/null
+++ b/tests/tests/wrap/hwasan/Android.bp
@@ -0,0 +1,95 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    // See: http://go/android-license-faq
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_library {
+    name: "cts_tests_tests_hwasan_src",
+    srcs: ["src/**/*.java"],
+    libs: [
+        "compatibility-device-util-axt",
+        "android.test.runner.stubs",
+        "android.test.base.stubs",
+    ],
+}
+
+filegroup {
+    name: "hwasan_wrap.sh",
+    srcs: [
+        "wrap.sh",
+    ],
+    path: ".",
+}
+
+android_test {
+    name: "CtsWrapHwasanTestCases",
+    compile_multilib: "both",
+    dex_preopt: {
+        enabled: false,
+    },
+    optimize: {
+        enabled: false,
+    },
+    static_libs: [
+        "compatibility-device-util-axt",
+        "androidx.test.ext.junit",
+        "androidx.test.rules",
+        "hwasan_debug_lib",
+        "cts_tests_tests_hwasan_src",
+    ],
+    libs: [
+        "android.test.runner.stubs",
+        "android.test.base.stubs",
+    ],
+    jni_libs: [
+        "libcts_wrap_hwasan_jni",
+        "libcts_jni",
+    ],
+    test_suites: [
+        "cts",
+        "general-tests",
+    ],
+    sdk_version: "test_current",
+    manifest: "AndroidManifest.xml",
+    use_embedded_native_libs: false,
+}
+
+cc_library {
+    name: "libcts_wrap_hwasan_jni",
+    srcs: ["jni/cts_wrap_hwasan_jni.cpp"],
+    header_libs: ["jni_headers"],
+    sdk_version: "current",
+    stl: "libc++",
+}
+
+java_genrule {
+    name: "hwasan_debug_lib",
+    srcs: [":hwasan_wrap.sh"],
+    tools: ["soong_zip"],
+    out: ["hwasan_debug_abi.jar"],
+    cmd: "mkdir -p $(genDir)/lib/armeabi-v7a/ && " +
+         "mkdir -p $(genDir)/lib/arm64-v8a/ && " +
+         "mkdir -p $(genDir)/lib/x86/ && " +
+         "mkdir -p $(genDir)/lib/x86_64/ && " +
+         "cp $(in) $(genDir)/lib/armeabi-v7a/ && " +
+         "cp $(in) $(genDir)/lib/arm64-v8a/ && " +
+         "cp $(in) $(genDir)/lib/x86/ && " +
+         "cp $(in) $(genDir)/lib/x86_64/ && " +
+         "$(location soong_zip) -o $(out) -C $(genDir) " +
+         "-D $(genDir)/lib/armeabi-v7a/ -D $(genDir)/lib/arm64-v8a/ " +
+         "-D $(genDir)/lib/x86/ -D $(genDir)/lib/x86_64/",
+}
diff --git a/tests/tests/wrap/hwasan/AndroidManifest.xml b/tests/tests/wrap/hwasan/AndroidManifest.xml
new file mode 100644
index 0000000..29502ea
--- /dev/null
+++ b/tests/tests/wrap/hwasan/AndroidManifest.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+     package="android.wrap.hwasan.cts">
+
+    <!-- Ensure that wrap.sh is extracted. -->
+    <application android:debuggable="true"
+         android:extractNativeLibs="true">
+        <uses-library android:name="android.test.runner"/>
+        <activity android:name="android.hwasan.WrapActivity"
+             android:exported="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN"/>
+                <category android:name="android.intent.category.LAUNCHER"/>
+            </intent-filter>
+        </activity>
+    </application>
+
+    <!--  self-instrumenting test package. -->
+    <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+         android:label="CTS tests for wrap.sh"
+         android:targetPackage="android.wrap.hwasan.cts">
+    </instrumentation>
+</manifest>
diff --git a/tests/tests/wrap/hwasan/AndroidTest.xml b/tests/tests/wrap/hwasan/AndroidTest.xml
new file mode 100644
index 0000000..8f6e626
--- /dev/null
+++ b/tests/tests/wrap/hwasan/AndroidTest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2023 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Config for CTS HWASan Wrap test cases">
+    <option name="test-suite-tag" value="cts" />
+    <option name="config-descriptor:metadata" key="component" value="art" />
+    <option name="not-shardable" value="true" />
+    <option name="config-descriptor:metadata" key="parameter" value="not_instant_app" />
+    <option name="config-descriptor:metadata" key="parameter" value="no_foldable_states" />
+    <option name="config-descriptor:metadata" key="parameter" value="multi_abi" />
+    <option name="config-descriptor:metadata" key="parameter" value="secondary_user" />
+    <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="cleanup-apks" value="true" />
+        <option name="test-file-name" value="CtsWrapHwasanTestCases.apk" />
+    </target_preparer>
+    <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
+        <option name="package" value="android.wrap.hwasan.cts" />
+    </test>
+</configuration>
diff --git a/tests/tests/wrap/hwasan/OWNERS b/tests/tests/wrap/hwasan/OWNERS
new file mode 100644
index 0000000..13edd31
--- /dev/null
+++ b/tests/tests/wrap/hwasan/OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 14890
+eugenis@google.com
+pcc@google.com
+mitchp@google.com
+fmayer@google.com
diff --git a/tests/tests/wrap/hwasan/jni/cts_wrap_hwasan_jni.cpp b/tests/tests/wrap/hwasan/jni/cts_wrap_hwasan_jni.cpp
new file mode 100644
index 0000000..df14ee6
--- /dev/null
+++ b/tests/tests/wrap/hwasan/jni/cts_wrap_hwasan_jni.cpp
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <jni.h>
+
+extern "C" void __hwasan_init() __attribute__((weak));
+
+extern "C" JNIEXPORT jboolean JNICALL
+Java_android_wrap_hwasan_cts_WrapTest_runningWithHwasan(JNIEnv*, jclass) {
+    return __hwasan_init != nullptr;
+}
diff --git a/tests/tests/wrap/hwasan/src/android/hwasan/WrapActivity.java b/tests/tests/wrap/hwasan/src/android/hwasan/WrapActivity.java
new file mode 100644
index 0000000..00aac6e
--- /dev/null
+++ b/tests/tests/wrap/hwasan/src/android/hwasan/WrapActivity.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hwasan;
+
+import android.app.Activity;
+import android.os.Bundle;
+
+/** A simple no-op activity. */
+public class WrapActivity extends Activity {
+
+    @Override
+    public void onCreate(Bundle icicle) {
+        super.onCreate(icicle);
+    }
+}
diff --git a/tests/tests/wrap/hwasan/src/android/hwasan/cts/WrapTest.java b/tests/tests/wrap/hwasan/src/android/hwasan/cts/WrapTest.java
new file mode 100644
index 0000000..744502a
--- /dev/null
+++ b/tests/tests/wrap/hwasan/src/android/hwasan/cts/WrapTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.wrap.hwasan.cts;
+
+
+import android.hwasan.WrapActivity;
+import android.test.ActivityInstrumentationTestCase2;
+
+import com.android.compatibility.common.util.CpuFeatures;
+
+public class WrapTest extends ActivityInstrumentationTestCase2<WrapActivity> {
+    static {
+        System.loadLibrary("cts_wrap_hwasan_jni");
+    }
+
+    private WrapActivity mActivity;
+
+    public WrapTest() {
+        super(WrapActivity.class);
+    }
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        // Start the activity.
+        mActivity = getActivity();
+        // Wait for the UI Thread to become idle.
+        getInstrumentation().waitForIdleSync();
+    }
+
+    @Override
+    protected void tearDown() throws Exception {
+        // Nothing to do here.
+        super.tearDown();
+    }
+
+    public void testProperty() throws Exception {
+        if (!CpuFeatures.isArm64Cpu()) return;
+        assertTrue(System.getenv("LD_HWASAN") != null);
+    }
+
+    public static native boolean runningWithHwasan();
+
+    public void testRunningWithHwasan() throws Exception {
+        if (!CpuFeatures.isArm64Cpu()) return;
+        assertTrue(runningWithHwasan());
+    }
+}
diff --git a/tests/tests/wrap/hwasan/wrap.sh b/tests/tests/wrap/hwasan/wrap.sh
new file mode 100755
index 0000000..2836232
--- /dev/null
+++ b/tests/tests/wrap/hwasan/wrap.sh
@@ -0,0 +1,15 @@
+#!/system/bin/sh
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+LD_HWASAN=1 "$@"
diff --git a/tools/cts-tradefed/res/config/cts-on-gsi-on-r.xml b/tools/cts-tradefed/res/config/cts-on-gsi-on-r.xml
index d3d9be4..c1b5fbe 100644
--- a/tools/cts-tradefed/res/config/cts-on-gsi-on-r.xml
+++ b/tools/cts-tradefed/res/config/cts-on-gsi-on-r.xml
@@ -59,4 +59,19 @@
     <option name="compatibility:exclude-filter" value="CtsIdentityTestCases android.security.identity.cts.ReaderAuthTest" />
     <option name="compatibility:exclude-filter" value="CtsIdentityTestCases android.security.identity.cts.UserAuthTest" />
 
+    <!-- CtsIdentityTestCases: b/282102975: excluding tests to be disabled. -->
+    <option name="compatibility:exclude-filter" value="CtsIdentityTestCases android.security.identity.cts.AttestationTest" />
+    <option name="compatibility:exclude-filter" value="CtsIdentityTestCases android.security.identity.cts.DynamicAuthTest" />
+    <option name="compatibility:exclude-filter" value="CtsIdentityTestCases android.security.identity.cts.EphemeralKeyTest" />
+    <option name="compatibility:exclude-filter" value="CtsIdentityTestCases android.security.identity.cts.ProvisioningTest" />
+    <option name="compatibility:exclude-filter" value="CtsIdentityTestCases android.security.identity.cts.ReaderAuthTest" />
+    <option name="compatibility:exclude-filter" value="CtsIdentityTestCases android.security.identity.cts.UserAuthTest" />
+
+    <!-- CtsKeystoreTestCases: b/262010816  -->
+    <option name="compatibility:exclude-filter" value="CtsKeystoreTestCases android.keystore.cts.ECDSASignatureTest" />
+    <!-- CtsKeystoreTestCases: b/264546541 -->
+    <option name="compatibility:exclude-filter" value="CtsKeystoreTestCases android.keystore.cts.NoAttestKeyTest" />
+    <!-- CtsMediaMiscTestCases: b/261813452 -->
+    <option name="compatibility:exclude-filter" value="CtsMediaMiscTestCases android.media.misc.cts.HeifWriterTest" />
+
 </configuration>
diff --git a/tools/cts-tradefed/res/config/cts-on-gsi-on-s.xml b/tools/cts-tradefed/res/config/cts-on-gsi-on-s.xml
index 463ac1a..c7d3d8b 100644
--- a/tools/cts-tradefed/res/config/cts-on-gsi-on-s.xml
+++ b/tools/cts-tradefed/res/config/cts-on-gsi-on-s.xml
@@ -60,4 +60,7 @@
     <option name="compatibility:exclude-filter" value="CtsMediaMiscTestCases android.media.misc.cts.ResourceManagerTest#testAVCVideoCodecReclaimHighResolution" />
     <option name="compatibility:exclude-filter" value="CtsMediaMiscTestCases android.media.misc.cts.ResourceManagerTest#testHEVCVideoCodecReclaimHighResolution" />
 
+    <!-- CtsNativeMediaAAudioTestCases: b/261824947 -->
+    <option name="compatibility:exclude-filter" value="CtsNativeMediaAAudioTestCases android.nativemedia.aaudio.AAudioTests" />
+
 </configuration>