Merge "Add CTS test for new roaming state APIs in NetworkRegistrationInfo"
diff --git a/OWNERS b/OWNERS
index 8771ccd..adb9461 100644
--- a/OWNERS
+++ b/OWNERS
@@ -12,6 +12,6 @@
 
 # Android EngProd Approvers
 wenshan@google.com
-guangzhu@google.com
-jdesprez@google.com
+guangzhu@google.com #{LAST_RESORT_SUGGESTION}
+jdesprez@google.com #{LAST_RESORT_SUGGESTION}
 normancheung@google.com #{LAST_RESORT_SUGGESTION}
\ No newline at end of file
diff --git a/apps/CtsVerifier/AndroidManifest.xml b/apps/CtsVerifier/AndroidManifest.xml
index e357e8f..2001b7b 100644
--- a/apps/CtsVerifier/AndroidManifest.xml
+++ b/apps/CtsVerifier/AndroidManifest.xml
@@ -5445,6 +5445,7 @@
                 <category android:name="android.cts.intent.category.MANUAL_TEST" />
             </intent-filter>
             <meta-data android:name="test_category" android:value="@string/test_category_audio" />
+            <meta-data android:name="test_required_features" android:value="android.hardware.hdmi.cec" />
             <meta-data android:name="display_mode" android:value="multi_display_mode" />
             <meta-data android:name="ApiTest"
                 android:value="android.media.AudioDescriptor#getStandard|
diff --git a/common/device-side/util-axt/src/com/android/compatibility/common/util/OWNERS b/common/device-side/util-axt/src/com/android/compatibility/common/util/OWNERS
index 2e17a8c..209ce99 100644
--- a/common/device-side/util-axt/src/com/android/compatibility/common/util/OWNERS
+++ b/common/device-side/util-axt/src/com/android/compatibility/common/util/OWNERS
@@ -1,6 +1,9 @@
 per-file AmMonitor.java = file:platform/frameworks/base:/services/core/java/com/android/server/am/OWNERS
 per-file AnrMonitor.java = file:platform/frameworks/base:/services/core/java/com/android/server/am/OWNERS
 per-file BaseDefaultPermissionGrantPolicyTest.java = file:platform/frameworks/base:/core/java/android/permission/DEFAULT_PERMISSION_GRANT_POLICY_OWNERS
+per-file CtsKeyEventUtil.java = file:platform/cts:/tests/tests/widget/OWNERS
+per-file CtsMouseUtil.java = file:platform/cts:/tests/tests/widget/OWNERS
+per-file CtsTouchUtils.java = file:platform/cts:/tests/tests/widget/OWNERS
 per-file ReadElf.java = enh@google.com
 per-file *Settings*.java = felipeal@google.com
 per-file User*Helper*.java = felipeal@google.com
diff --git a/hostsidetests/incident/AndroidTest.xml b/hostsidetests/incident/AndroidTest.xml
index 727277d..73be7b4 100644
--- a/hostsidetests/incident/AndroidTest.xml
+++ b/hostsidetests/incident/AndroidTest.xml
@@ -20,9 +20,6 @@
     <option name="config-descriptor:metadata" key="parameter" value="multi_abi" />
     <option name="config-descriptor:metadata" key="parameter" value="not_secondary_user" />
     <option name="config-descriptor:metadata" key="parameter" value="no_foldable_states" />
-    <target_preparer class="com.android.tradefed.targetprep.SwitchUserTargetPreparer">
-        <option name="user-type" value="system" />
-    </target_preparer>
     <test class="com.android.compatibility.common.tradefed.testtype.JarHostTest" >
         <option name="jar" value="CtsIncidentHostTestCases.jar" />
     </test>
diff --git a/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityFingerprintGestureTest.java b/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityFingerprintGestureTest.java
index e36d1ee..64cc0f1 100644
--- a/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityFingerprintGestureTest.java
+++ b/tests/accessibilityservice/src/android/accessibilityservice/cts/AccessibilityFingerprintGestureTest.java
@@ -18,6 +18,7 @@
 
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeTrue;
 import static org.mockito.Mockito.reset;
 import static org.mockito.Mockito.timeout;
 import static org.mockito.Mockito.verify;
@@ -90,9 +91,10 @@
 
     @Test
     public void testGestureDetectionListener_whenAuthenticationStartsAndStops_calledBack() {
-        if (!mFingerprintGestureController.isGestureDetectionAvailable()) {
-            return;
-        }
+        assumeTrue("Fingerprint gesture detection is not available",
+                mFingerprintGestureController.isGestureDetectionAvailable());
+        assumeTrue("No enrolled fingerprints; cannot open fingerprint prompt",
+                mFingerprintManager.hasEnrolledFingerprints());
         // Launch an activity to make sure we're in the foreground
         mActivityRule.launchActivity(null);
         mFingerprintGestureController.registerFingerprintGestureCallback(
diff --git a/tests/autofillservice/res/layout/scrollable_login_activity.xml b/tests/autofillservice/res/layout/scrollable_login_activity.xml
new file mode 100644
index 0000000..dfa5227
--- /dev/null
+++ b/tests/autofillservice/res/layout/scrollable_login_activity.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:focusable="true"
+    android:focusableInTouchMode="true"
+    android:orientation="vertical" >
+
+    <ScrollView android:layout_width="match_parent"
+                android:layout_height="wrap_content" >
+
+        <LinearLayout
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content" >
+
+            <include layout="@layout/login_activity" />
+
+        </LinearLayout>
+
+    </ScrollView>
+
+</LinearLayout>
diff --git a/tests/autofillservice/src/android/autofillservice/cts/SessionLifecycleTest.java b/tests/autofillservice/src/android/autofillservice/cts/SessionLifecycleTest.java
index 7f5e8d3..e8a58bd 100644
--- a/tests/autofillservice/src/android/autofillservice/cts/SessionLifecycleTest.java
+++ b/tests/autofillservice/src/android/autofillservice/cts/SessionLifecycleTest.java
@@ -542,10 +542,9 @@
         // It works fine for portrait but for the platforms that the default orientation
         // is landscape, e.g. automotive. Depending on the height of the IME, the ID_LOGIN
         // button may not be visible.
-        // In order to avoid that,
-        // generate back key event to hide IME before pressing ID_LOGIN button.
-        mUiBot.pressBack();
 
+        // In order to avoid that, scroll until the ID_LOGIN button appears.
+        mUiBot.scrollToTextObject(ID_LOGIN);
         mUiBot.selectByRelativeId(ID_LOGIN);
         mUiBot.assertSaveShowing(SAVE_DATA_TYPE_USERNAME);
 
diff --git a/tests/autofillservice/src/android/autofillservice/cts/activities/OutOfProcessLoginActivity.java b/tests/autofillservice/src/android/autofillservice/cts/activities/OutOfProcessLoginActivity.java
index 4cab12c..ca91090 100644
--- a/tests/autofillservice/src/android/autofillservice/cts/activities/OutOfProcessLoginActivity.java
+++ b/tests/autofillservice/src/android/autofillservice/cts/activities/OutOfProcessLoginActivity.java
@@ -41,7 +41,7 @@
         Log.i(TAG, "onCreate(" + savedInstanceState + ")");
         super.onCreate(savedInstanceState);
 
-        setContentView(R.layout.login_activity);
+        setContentView(R.layout.scrollable_login_activity);
 
         findViewById(R.id.login).setOnClickListener((v) -> finish());
 
diff --git a/tests/framework/base/windowmanager/src/android/server/wm/KeepClearRectsTests.java b/tests/framework/base/windowmanager/src/android/server/wm/KeepClearRectsTests.java
index 55eae8c..81173bb 100644
--- a/tests/framework/base/windowmanager/src/android/server/wm/KeepClearRectsTests.java
+++ b/tests/framework/base/windowmanager/src/android/server/wm/KeepClearRectsTests.java
@@ -361,6 +361,7 @@
 
         final Rect keepClearRect = new Rect(0, 0, 25, 25);
         final View v = createTestViewInActivity(activity, keepClearRect);
+        final List<Rect> prevKeepClearRectsOnDisplay = getKeepClearRectsOnDefaultDisplay();
         mTestSession.runOnMainSyncAndWait(() -> v.setPreferKeepClear(true));
         assertSameElementsEventually(Arrays.asList(keepClearRect),
                 () -> getKeepClearRectsForActivity(activity));
@@ -372,15 +373,16 @@
         assertSameElementsEventually(TEST_KEEP_CLEAR_RECTS,
                 () -> getKeepClearRectsForActivity(activity));
 
-        final List<Rect> expectedRectsInScreenSpace =
-                getRectsInScreenSpace(TEST_KEEP_CLEAR_RECTS, activity.getComponentName());
-        assertSameElementsEventually(expectedRectsInScreenSpace,
+        final List<Rect> expectedRectsOnDisplay = new ArrayList<Rect>();
+        expectedRectsOnDisplay.addAll(prevKeepClearRectsOnDisplay);
+        expectedRectsOnDisplay.addAll(
+                getRectsInScreenSpace(TEST_KEEP_CLEAR_RECTS, activity.getComponentName()));
+        assertSameElementsEventually(expectedRectsOnDisplay,
                 () -> getKeepClearRectsOnDefaultDisplay());
 
         activity.finishAndRemoveTask();
-        assertTrue(Collections.disjoint(
-                expectedRectsInScreenSpace,
-                getKeepClearRectsOnDefaultDisplay()));
+        assertSameElementsEventually(prevKeepClearRectsOnDisplay,
+                () -> getKeepClearRectsOnDefaultDisplay());
     }
 
     @Test
@@ -419,6 +421,7 @@
 
         final Rect viewBounds = new Rect(0, 0, 25, 25);
         final View v1 = createTestViewInActivity(activity1, viewBounds);
+        final List<Rect> prevKeepClearRectsOnDisplay = getKeepClearRectsOnDefaultDisplay();
         mTestSession.runOnMainSyncAndWait(() -> v1.setPreferKeepClear(true));
         assertSameElementsEventually(Arrays.asList(viewBounds),
                 () -> getKeepClearRectsForActivity(activity1));
@@ -438,8 +441,12 @@
         mWmState.assertVisibility(activity2.getComponentName(), true);
 
         // Since both activities are fullscreen, WM only takes the keep clear areas from the top one
-        assertSameElementsEventually(getRectsInScreenSpace(TEST_KEEP_CLEAR_RECTS,
-                activity2.getComponentName()), () -> getKeepClearRectsOnDefaultDisplay());
+        final List<Rect> expectedRectsOnDisplay = new ArrayList<Rect>();
+        expectedRectsOnDisplay.addAll(prevKeepClearRectsOnDisplay);
+        expectedRectsOnDisplay.addAll(getRectsInScreenSpace(TEST_KEEP_CLEAR_RECTS,
+                activity2.getComponentName()));
+        assertSameElementsEventually(expectedRectsOnDisplay,
+                () -> getKeepClearRectsOnDefaultDisplay());
     }
 
     @Test
@@ -449,15 +456,24 @@
         translucentTestSession.launchTestActivityOnDisplaySync(
                 TranslucentTestActivity.class, DEFAULT_DISPLAY);
         final TestActivity activity1 = translucentTestSession.getActivity();
-
         final Rect viewBounds = new Rect(0, 0, 25, 25);
         final View v1 = createTestViewInActivity(activity1, viewBounds);
+        final List<Rect> prevKeepClearRectsOnDisplay = getKeepClearRectsOnDefaultDisplay();
         translucentTestSession.runOnMainSyncAndWait(() -> v1.setPreferKeepClear(true));
-        assertSameElementsEventually(getRectsInScreenSpace(Arrays.asList(viewBounds),
-                activity1.getComponentName()), () -> getKeepClearRectsOnDefaultDisplay());
 
+        // Add keep-clear rects in the activity
+        final List<Rect> expectedRectsOnDisplay = new ArrayList<Rect>();
+        expectedRectsOnDisplay.addAll(prevKeepClearRectsOnDisplay);
+        expectedRectsOnDisplay.addAll(getRectsInScreenSpace(Arrays.asList(viewBounds),
+                activity1.getComponentName()));
+        assertSameElementsEventually(expectedRectsOnDisplay,
+                () -> getKeepClearRectsOnDefaultDisplay());
+
+        // Start an opaque activity on top
         mTestSession.launchTestActivityOnDisplaySync(TestActivity.class, DEFAULT_DISPLAY);
         final TestActivity activity2 = mTestSession.getActivity();
+
+        // Add keep-clear rects in the opaque activity
         final View v2 = createTestViewInActivity(activity2);
         mTestSession.runOnMainSyncAndWait(() -> v2.setPreferKeepClearRects(TEST_KEEP_CLEAR_RECTS));
         assertSameElementsEventually(TEST_KEEP_CLEAR_RECTS,
@@ -466,8 +482,13 @@
         mWmState.waitAndAssertVisibilityGone(activity1.getComponentName());
         mWmState.assertVisibility(activity2.getComponentName(), true);
 
-        assertSameElementsEventually(TEST_KEEP_CLEAR_RECTS,
-                () -> getKeepClearRectsForActivity(activity2));
+        // Only the opaque activity's keep-clear areas should be reported on the display
+        expectedRectsOnDisplay.clear();
+        expectedRectsOnDisplay.addAll(prevKeepClearRectsOnDisplay);
+        expectedRectsOnDisplay.addAll(getRectsInScreenSpace(
+                    TEST_KEEP_CLEAR_RECTS, activity2.getComponentName()));
+        assertSameElementsEventually(expectedRectsOnDisplay,
+                () -> getKeepClearRectsOnDefaultDisplay());
     }
 
     @Test
@@ -475,30 +496,15 @@
         assumeTrue("Skipping test: no split multi-window support",
                 supportsSplitScreenMultiWindow());
 
-        final LaunchActivityBuilder activityBuilder1 = getLaunchActivityBuilder()
-                .setUseInstrumentation()
-                .setIntentExtra(extra -> {
-                    extra.putParcelableArrayList(EXTRA_KEEP_CLEAR_RECTS,
-                            new ArrayList(TEST_KEEP_CLEAR_RECTS));
-                })
-                .setTargetActivity(KEEP_CLEAR_RECTS_ACTIVITY);
+        startKeepClearActivitiesInSplitscreen(KEEP_CLEAR_RECTS_ACTIVITY,
+                KEEP_CLEAR_RECTS_ACTIVITY2, Collections.emptyList(), Collections.emptyList());
+        final List<Rect> prevKeepClearRectsOnDisplay = getKeepClearRectsOnDefaultDisplay();
 
-        final LaunchActivityBuilder activityBuilder2 = getLaunchActivityBuilder()
-                .setUseInstrumentation()
-                .setIntentExtra(extra -> {
-                    extra.putParcelableArrayList(EXTRA_KEEP_CLEAR_RECTS,
-                            new ArrayList(TEST_KEEP_CLEAR_RECTS_2));
-                })
-                .setTargetActivity(KEEP_CLEAR_RECTS_ACTIVITY2);
+        removeRootTask(mWmState.getTaskByActivity(KEEP_CLEAR_RECTS_ACTIVITY).mTaskId);
+        removeRootTask(mWmState.getTaskByActivity(KEEP_CLEAR_RECTS_ACTIVITY2).mTaskId);
 
-        launchActivitiesInSplitScreen(activityBuilder1, activityBuilder2);
-
-        waitAndAssertResumedActivity(KEEP_CLEAR_RECTS_ACTIVITY, KEEP_CLEAR_RECTS_ACTIVITY
-                + " must be resumed");
-        waitAndAssertResumedActivity(KEEP_CLEAR_RECTS_ACTIVITY2, KEEP_CLEAR_RECTS_ACTIVITY2
-                + " must be resumed");
-        mWmState.assertVisibility(KEEP_CLEAR_RECTS_ACTIVITY, true);
-        mWmState.assertVisibility(KEEP_CLEAR_RECTS_ACTIVITY2, true);
+        startKeepClearActivitiesInSplitscreen(KEEP_CLEAR_RECTS_ACTIVITY,
+                KEEP_CLEAR_RECTS_ACTIVITY2, TEST_KEEP_CLEAR_RECTS, TEST_KEEP_CLEAR_RECTS_2);
 
         assertSameElementsEventually(TEST_KEEP_CLEAR_RECTS,
                 () -> getKeepClearRectsForActivity(KEEP_CLEAR_RECTS_ACTIVITY));
@@ -506,11 +512,38 @@
                 () -> getKeepClearRectsForActivity(KEEP_CLEAR_RECTS_ACTIVITY2));
 
         final List<Rect> expected = new ArrayList();
+        expected.addAll(prevKeepClearRectsOnDisplay);
         expected.addAll(getRectsInScreenSpace(TEST_KEEP_CLEAR_RECTS, KEEP_CLEAR_RECTS_ACTIVITY));
         expected.addAll(getRectsInScreenSpace(TEST_KEEP_CLEAR_RECTS_2, KEEP_CLEAR_RECTS_ACTIVITY2));
         assertSameElementsEventually(expected, () -> getKeepClearRectsOnDefaultDisplay());
     }
 
+    private void startKeepClearActivitiesInSplitscreen(ComponentName activity1,
+            ComponentName activity2, List<Rect> keepClearRects1, List<Rect> keepClearRects2) {
+        final LaunchActivityBuilder activityBuilder1 = getLaunchActivityBuilder()
+                .setUseInstrumentation()
+                .setTargetActivity(activity1)
+                .setIntentExtra(extra -> {
+                    extra.putParcelableArrayList(EXTRA_KEEP_CLEAR_RECTS,
+                            new ArrayList(keepClearRects1));
+                });
+
+        final LaunchActivityBuilder activityBuilder2 = getLaunchActivityBuilder()
+                .setUseInstrumentation()
+                .setTargetActivity(activity2)
+                .setIntentExtra(extra -> {
+                    extra.putParcelableArrayList(EXTRA_KEEP_CLEAR_RECTS,
+                            new ArrayList(keepClearRects2));
+                });
+
+        launchActivitiesInSplitScreen(activityBuilder1, activityBuilder2);
+
+        waitAndAssertResumedActivity(activity1, activity1 + " must be resumed");
+        waitAndAssertResumedActivity(activity2, activity2 + " must be resumed");
+        mWmState.assertVisibility(activity1, true);
+        mWmState.assertVisibility(activity2, true);
+    }
+
     @Test
     public void testUnrestrictedKeepClearRects() throws Exception {
         mTestSession.launchTestActivityOnDisplaySync(TestActivity.class, DEFAULT_DISPLAY);
@@ -547,6 +580,7 @@
         mTestSession.runOnMainSyncAndWait(() -> {
             activity.addView(newView, params);
         });
+        waitForIdle();
         return newView;
     }
 
@@ -607,7 +641,8 @@
 
     private static <T> void assertSameElementsEventually(List<T> expected, Callable<List<T>> actual)
             throws Exception {
-        PollingCheck.check("Lists do not have the same elements.",
+        PollingCheck.check("Lists do not have the same elements."
+                + " Expected=" + expected + ", actual=" + actual.call(),
                 SAME_ELEMENT_ASSERTION_TIMEOUT,
                 () -> hasSameElements(expected, actual.call()));
     }
diff --git a/tests/media/AndroidManifest.xml b/tests/media/AndroidManifest.xml
index 3a97bbe..0a1f5e0 100644
--- a/tests/media/AndroidManifest.xml
+++ b/tests/media/AndroidManifest.xml
@@ -23,6 +23,7 @@
 
     <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
     <uses-permission android:name="android.permission.MANAGE_EXTERNAL_STORAGE" />
+    <uses-permission android:name="android.permission.CAMERA" />
 
     <application
         android:requestLegacyExternalStorage="true"
diff --git a/tests/media/common/src/android/mediav2/common/cts/BitStreamUtils.java b/tests/media/common/src/android/mediav2/common/cts/BitStreamUtils.java
index 616eaa4..66ed70f 100644
--- a/tests/media/common/src/android/mediav2/common/cts/BitStreamUtils.java
+++ b/tests/media/common/src/android/mediav2/common/cts/BitStreamUtils.java
@@ -16,6 +16,7 @@
 
 package android.mediav2.common.cts;
 
+import static android.media.MediaCodecInfo.CodecProfileLevel.*;
 import static android.media.MediaFormat.PICTURE_TYPE_B;
 import static android.media.MediaFormat.PICTURE_TYPE_I;
 import static android.media.MediaFormat.PICTURE_TYPE_P;
@@ -23,11 +24,13 @@
 
 import android.media.MediaCodec;
 import android.media.MediaFormat;
+import android.util.Pair;
 
 import org.junit.Assert;
 
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.HashMap;
 
 /**
  * This class contains utility functions that parse compressed bitstream and returns metadata
@@ -38,9 +41,15 @@
  * Currently this class hosts utils that can,
  * <ul>
  *     <li>Return frame type of the access units of avc, hevc, av1.</li>
+ *     <li>Return profile/level information of avc, hevc, av1, vp9, mpeg4, h263, aac.</li>
  * </ul>
  */
 public class BitStreamUtils {
+    public static int getHashMapVal(HashMap<Integer, Integer> obj, int key) {
+        Integer val = obj.get(key);
+        return val == null ? -1 : val;
+    }
+
     static class ParsableBitArray {
         protected final byte[] mData;
         protected final int mOffset;
@@ -71,9 +80,9 @@
         }
 
         public int readBits(int numBits) {
-            if (numBits > 31) {
-                throw new IllegalArgumentException(
-                        "left shift overflow exception, reading too many bits at one go");
+            if (numBits > 32) {
+                throw new IllegalArgumentException("readBits: cannot read " + numBits
+                        + " bits, the int return value holds at most 32 bits");
             }
             int value = 0;
             for (int i = 0; i < numBits; i++) {
@@ -84,9 +93,9 @@
         }
 
         public long readBitsLong(int numBits) {
-            if (numBits > 63) {
-                throw new IllegalArgumentException(
-                        "left shift overflow exception, reading too many bits at one go");
+            if (numBits > 64) {
+                throw new IllegalArgumentException("readBitsLong: cannot read " + numBits
+                        + " bits, the long return value holds at most 64 bits");
             }
             long value = 0;
             for (int i = 0; i < numBits; i++) {
@@ -168,10 +177,131 @@
         }
 
         public abstract int getFrameType();
+
+        public abstract Pair<Integer, Integer> getProfileLevel(boolean isCsd);
+
+        // .first = profile, .second = level
+        public Pair<Integer, Integer> plToPair(int profile, int level) {
+            return Pair.create(profile, level);
+        }
+    }
+
+    static class Mpeg4Parser extends ParserBase {
+        @Override
+        public int getFrameType() {
+            return PICTURE_TYPE_UNKNOWN;
+        }
+
+        @Override
+        public Pair<Integer, Integer> getProfileLevel(@SuppressWarnings("unused") boolean isCsd) {
+            ParsableBitArray bitArray = new ParsableBitArray(mData, mOffset, mLimit);
+            Assert.assertEquals(0, bitArray.readBits(8));
+            Assert.assertEquals(0, bitArray.readBits(8));
+            Assert.assertEquals(1, bitArray.readBits(8));
+            Assert.assertEquals(0xb0, bitArray.readBits(8));
+            int profileLevel = bitArray.readBits(8);
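+            // map profile_and_level_indication (ISO/IEC 14496-2) to MediaCodecInfo constants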
+            switch (profileLevel) {
+                case 0x08: return plToPair(MPEG4ProfileSimple, MPEG4Level0);
+                case 0x01: return plToPair(MPEG4ProfileSimple, MPEG4Level1);
+                case 0x02: return plToPair(MPEG4ProfileSimple, MPEG4Level2);
+                case 0x03: return plToPair(MPEG4ProfileSimple, MPEG4Level3);
+                case 0xf0: return plToPair(MPEG4ProfileAdvancedSimple, MPEG4Level0);
+                case 0xf1: return plToPair(MPEG4ProfileAdvancedSimple, MPEG4Level1);
+                case 0xf2: return plToPair(MPEG4ProfileAdvancedSimple, MPEG4Level2);
+                case 0xf3: return plToPair(MPEG4ProfileAdvancedSimple, MPEG4Level3);
+                case 0xf7: return plToPair(MPEG4ProfileAdvancedSimple, MPEG4Level3b);
+                case 0xf4: return plToPair(MPEG4ProfileAdvancedSimple, MPEG4Level4);
+                case 0xf5: return plToPair(MPEG4ProfileAdvancedSimple, MPEG4Level5);
+                default: return null;
+            }
+        }
+    }
+
+    static class H263Parser extends ParserBase {
+        @Override
+        public int getFrameType() {
+            return PICTURE_TYPE_UNKNOWN;
+        }
+
+        @Override
+        public Pair<Integer, Integer> getProfileLevel(@SuppressWarnings("unused") boolean isCsd) {
+            ParsableBitArray bitArray = new ParsableBitArray(mData, mOffset, mLimit);
+            Assert.assertEquals("bad psc", 0x20, bitArray.readBits(22));
+            bitArray.readBits(8); // tr
+            Assert.assertEquals(1, bitArray.readBits(1));
+            Assert.assertEquals(0, bitArray.readBits(1));
+            bitArray.readBits(1);  // split screen
+            bitArray.readBits(1);  // camera indicator
+            bitArray.readBits(1);  // freeze indicator
+            int sourceFormat = bitArray.readBits(3);
+            int picType;
+            int umv = 0, sac = 0, ap = 0, pb = 0;
+            int aic = 0, df = 0, ss = 0, rps = 0, isd = 0, aiv = 0, mq = 0;
+            int rpr = 0, rru = 0;
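+            // source format 7 signals an extended PTYPE (PLUSPTYPE); ufep == 1 means OPPTYPE follows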
+            if (sourceFormat == 7) {
+                int ufep = bitArray.readBits(3);
+                if (ufep == 1) {
+                    sourceFormat = bitArray.readBits(3);
+                    bitArray.readBits(1); // custom pcf
+                    umv = bitArray.readBits(1);
+                    sac = bitArray.readBits(1);
+                    ap = bitArray.readBits(1);
+                    aic = bitArray.readBits(1);
+                    df = bitArray.readBits(1);
+                    ss = bitArray.readBits(1);
+                    rps = bitArray.readBits(1);
+                    isd = bitArray.readBits(1);
+                    aiv = bitArray.readBits(1);
+                    mq = bitArray.readBits(1);
+                    Assert.assertEquals(1, bitArray.readBits(1));
+                    Assert.assertEquals(0, bitArray.readBits(3));
+                }
+                picType = bitArray.readBits(3);
+                rpr = bitArray.readBits(1);
+                rru = bitArray.readBits(1);
+                bitArray.readBits(1);  // rtype
+                Assert.assertEquals(0, bitArray.readBits(1));  // reserved
+                Assert.assertEquals(0, bitArray.readBits(1));  // reserved
+                Assert.assertEquals(1, bitArray.readBits(1));  // start code emulation
+            } else {
+                picType = bitArray.readBits(1);
+                umv = bitArray.readBits(1);
+                sac = bitArray.readBits(1);
+                ap = bitArray.readBits(1);
+                pb = bitArray.readBits(1);
+            }
+            int profile = H263ProfileBaseline;
+            if (ap == 1) profile = H263ProfileBackwardCompatible;
+            if (aic == 1 && df == 1 && ss == 1 && mq == 1) profile = H263ProfileISWV2;
+            return plToPair(profile, -1);
+        }
     }
 
     static class AvcParser extends ParserBase {
         private static final int NO_NAL_UNIT_FOUND = -1;
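+        // keys are level_idc values, i.e. ten times the level number (Table A-1 of ITU-T H.264)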
+        private static final HashMap<Integer, Integer> LEVEL_MAP = new HashMap<>() {
+            {
+                put(10, AVCLevel1);
+                put(11, AVCLevel11);
+                put(12, AVCLevel12);
+                put(13, AVCLevel13);
+                put(20, AVCLevel2);
+                put(21, AVCLevel21);
+                put(22, AVCLevel22);
+                put(30, AVCLevel3);
+                put(31, AVCLevel31);
+                put(32, AVCLevel32);
+                put(40, AVCLevel4);
+                put(41, AVCLevel41);
+                put(42, AVCLevel42);
+                put(50, AVCLevel5);
+                put(51, AVCLevel51);
+                put(52, AVCLevel52);
+                put(60, AVCLevel6);
+                put(61, AVCLevel61);
+                put(62, AVCLevel62);
+            }
+        };
 
         private int getNalUnitStartOffset(byte[] dataArray, int start, int limit) {
             for (int pos = start; pos + 3 < limit; pos++) {
@@ -198,7 +328,8 @@
                 int nalUnitType = getNalUnitType(mData, offset);
                 if (nalUnitType == 1 || nalUnitType == 2 || nalUnitType == 5) {  // coded slice
                     NalParsableBitArray bitArray = new NalParsableBitArray(mData, offset, mLimit);
-                    bitArray.readBits(8); // forbidden zero bit + nal_ref_idc + nal_unit_type
+                    Assert.assertEquals(0, bitArray.readBits(1)); // forbidden zero bit
+                    bitArray.readBits(7); // nal_ref_idc + nal_unit_type
                     bitArray.readUEV(); // first_mb_in_slice
                     int sliceType = bitArray.readUEV();
                     if (sliceType % 5 == 0) {
@@ -215,6 +346,77 @@
             }
             return PICTURE_TYPE_UNKNOWN;
         }
+
+        @Override
+        public Pair<Integer, Integer> getProfileLevel(@SuppressWarnings("unused") boolean isCsd) {
+            for (int pos = mOffset; pos < mLimit; ) {
+                int offset = getNalUnitStartOffset(mData, pos, mLimit);
+                if (offset == NO_NAL_UNIT_FOUND) return null;
+                if (getNalUnitType(mData, offset) == 7) { // seq_parameter_set_rbsp
+                    NalParsableBitArray bitArray = new NalParsableBitArray(mData, offset, mLimit);
+                    Assert.assertEquals(0, bitArray.readBits(1)); // forbidden zero bit
+                    bitArray.readBits(7); // nal_ref_idc + nal_unit_type
+                    int profileIdc = bitArray.readBits(8);
+                    int constraintSet0Flag = bitArray.readBits(1);
+                    int constraintSet1Flag = bitArray.readBits(1);
+                    int constraintSet2Flag = bitArray.readBits(1);
+                    int constraintSet3Flag = bitArray.readBits(1);
+                    int constraintSet4Flag = bitArray.readBits(1);
+                    int constraintSet5Flag = bitArray.readBits(1);
+                    Assert.assertEquals(0, bitArray.readBits(2)); // reserved zero 2 bits
+                    int levelIdc = bitArray.readBits(8);
+
+                    int profile = -1;
+                    if (constraintSet0Flag == 1 || profileIdc == 66) {
+                        profile = constraintSet1Flag == 1 ? AVCProfileConstrainedBaseline :
+                                AVCProfileBaseline;
+                    } else if (constraintSet1Flag == 1 || profileIdc == 77) {
+                        profile = AVCProfileMain;
+                    } else if (constraintSet2Flag == 1 || profileIdc == 88) {
+                        profile = AVCProfileExtended;
+                    } else if (profileIdc == 100) {
+                        profile = (constraintSet4Flag == 1 && constraintSet5Flag == 1)
+                                ? AVCProfileConstrainedHigh : AVCProfileHigh;
+                    } else if (profileIdc == 110) {
+                        profile = AVCProfileHigh10;
+                    } else if (profileIdc == 122) {
+                        profile = AVCProfileHigh422;
+                    } else if (profileIdc == 244) {
+                        profile = AVCProfileHigh444;
+                    }
+
+                    // In bitstreams conforming to the Baseline, Constrained Baseline, Main, or
+                    // Extended profiles :
+                    // - If level_idc is equal to 11 and constraint_set3_flag is equal to 1, the
+                    // indicated level is level 1b.
+                    // - Otherwise (level_idc is not equal to 11 or constraint_set3_flag is not
+                    // equal to 1), level_idc is equal to a value of ten times the level number
+                    // (of the indicated level) specified in Table A-1.
+                    int level;
+                    if ((levelIdc == 11) && (profile == AVCProfileBaseline
+                            || profile == AVCProfileConstrainedBaseline || profile == AVCProfileMain
+                            || profile == AVCProfileExtended)) {
+                        level = constraintSet3Flag == 1 ? AVCLevel1b : AVCLevel11;
+                    } else if ((levelIdc == 9) && (profile == AVCProfileHigh
+                            || profile == AVCProfileHigh10 || profile == AVCProfileHigh422
+                            || profile == AVCProfileHigh444)) {
+                        // In bitstreams conforming to the High, High 10, High 4:2:2, High 4:4:4
+                        // Predictive, High 10 Intra, High 4:2:2 Intra, High 4:4:4 Intra, or
+                        // CAVLC 4:4:4 Intra profiles,
+                        // - If level_idc is equal to 9, the indicated level is level 1b.
+                        // - Otherwise (level_idc is not equal to 9), level_idc is equal to a
+                        // value of ten times the level number (of the indicated level) specified
+                        // in Table A-1
+                        level = AVCLevel1b;
+                    } else {
+                        level = getHashMapVal(LEVEL_MAP, levelIdc);
+                    }
+                    return plToPair(profile, level);
+                }
+                pos = offset;
+            }
+            return null;
+        }
     }
 
     static class HevcParser extends ParserBase {
@@ -223,6 +425,35 @@
         private static final int RASL_R = 9;
         private static final int BLA_W_LP = 16;
         private static final int RSV_IRAP_VCL23 = 23;
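+        // keys are general_level_idc values, i.e. thirty times the level number (ITU-T H.265)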
+        private static final HashMap<Integer, Integer> LEVEL_MAP_MAIN_TIER = new HashMap<>() {
+            {
+                put(30, HEVCMainTierLevel1);
+                put(60, HEVCMainTierLevel2);
+                put(63, HEVCMainTierLevel21);
+                put(90, HEVCMainTierLevel3);
+                put(93, HEVCMainTierLevel31);
+                put(120, HEVCMainTierLevel4);
+                put(123, HEVCMainTierLevel41);
+                put(150, HEVCMainTierLevel5);
+                put(153, HEVCMainTierLevel51);
+                put(156, HEVCMainTierLevel52);
+                put(180, HEVCMainTierLevel6);
+                put(183, HEVCMainTierLevel61);
+                put(186, HEVCMainTierLevel62);
+            }
+        };
+        private static final HashMap<Integer, Integer> LEVEL_MAP_HIGH_TIER = new HashMap<>() {
+            {
+                put(120, HEVCHighTierLevel4);
+                put(123, HEVCHighTierLevel41);
+                put(150, HEVCHighTierLevel5);
+                put(153, HEVCHighTierLevel51);
+                put(156, HEVCHighTierLevel52);
+                put(180, HEVCHighTierLevel6);
+                put(183, HEVCHighTierLevel61);
+                put(186, HEVCHighTierLevel62);
+            }
+        };
 
         private int getNalUnitStartOffset(byte[] dataArray, int start, int limit) {
             for (int pos = start; pos + 3 < limit; pos++) {
@@ -275,6 +506,120 @@
             }
             return PICTURE_TYPE_UNKNOWN;
         }
+
+        @Override
+        public Pair<Integer, Integer> getProfileLevel(@SuppressWarnings("unused") boolean isCsd) {
+            for (int pos = mOffset; pos < mLimit; ) {
+                int offset = getNalUnitStartOffset(mData, pos, mLimit);
+                if (offset == NO_NAL_UNIT_FOUND) return null;
+                if (getNalUnitType(mData, offset) == 33) { // sps_nut
+                    NalParsableBitArray bitArray = new NalParsableBitArray(mData, offset, mLimit);
+                    bitArray.readBits(16); // nal unit header
+                    bitArray.readBits(4); // sps video parameter set id
+                    bitArray.readBits(3); // sps_max_sub_layers_minus1
+                    bitArray.readBits(1); // sps temporal id nesting flag
+                    // profile_tier_level
+                    bitArray.readBits(2); // generalProfileSpace
+                    int generalTierFlag = bitArray.readBits(1);
+                    int generalProfileIdc = bitArray.readBits(5);
+                    int[] generalProfileCompatibility = new int[32];
+                    for (int j = 0; j < generalProfileCompatibility.length; j++) {
+                        generalProfileCompatibility[j] = bitArray.readBits(1);
+                    }
+                    bitArray.readBits(1); // general progressive source flag
+                    bitArray.readBits(1); // general interlaced source flag
+                    bitArray.readBits(1); // general non packed constraint flag
+                    bitArray.readBits(1); // general frame only constraint flag
+
+                    // The interpretation of the next 44 bits depends on generalProfileIdc and
+                    // generalProfileCompatibility, but they are not needed for this validation,
+                    // so skip over them.
+                    bitArray.readBitsLong(44);
+                    int generalLevelIdc = bitArray.readBits(8);
+
+                    int profile = -1;
+                    if (generalProfileIdc == 1 || generalProfileCompatibility[1] == 1) {
+                        profile = HEVCProfileMain;
+                    } else if (generalProfileIdc == 2 || generalProfileCompatibility[2] == 1) {
+                        profile = HEVCProfileMain10;
+                    } else if (generalProfileIdc == 3 || generalProfileCompatibility[3] == 1) {
+                        profile = HEVCProfileMainStill;
+                    }
+
+                    return plToPair(profile, getHashMapVal(
+                            generalTierFlag == 0 ? LEVEL_MAP_MAIN_TIER : LEVEL_MAP_HIGH_TIER,
+                            generalLevelIdc));
+                }
+                pos = offset;
+            }
+            return null;
+        }
+    }
+
+    static class Vp9Parser extends ParserBase {
+        private static final HashMap<Integer, Integer> PROFILE_MAP = new HashMap<>() {
+            {
+                put(0, VP9Profile0);
+                put(1, VP9Profile1);
+                put(2, VP9Profile2);
+                put(3, VP9Profile3);
+            }
+        };
+        private static final HashMap<Integer, Integer> LEVEL_MAP = new HashMap<>() {
+            {
+                put(10, VP9Level1);
+                put(11, VP9Level11);
+                put(20, VP9Level2);
+                put(21, VP9Level21);
+                put(30, VP9Level3);
+                put(31, VP9Level31);
+                put(40, VP9Level4);
+                put(41, VP9Level41);
+                put(50, VP9Level5);
+                put(51, VP9Level51);
+                put(60, VP9Level6);
+                put(61, VP9Level61);
+                put(62, VP9Level62);
+            }
+        };
+
+        private Pair<Integer, Integer> getProfileLevelFromCSD() { // parse vp9 codecprivate
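+            // CodecPrivate holds (id, length, value) entries; id 1 carries the profile and
+            // id 2 the level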
+            int profile = -1, level = -1;
+            for (int pos = mOffset; pos < mLimit; ) {
+                ParsableBitArray bitArray = new ParsableBitArray(mData, pos, mLimit);
+                int id = bitArray.readBits(8);
+                int len = bitArray.readBits(8);
+                pos += 2;
+                int val = bitArray.readBits(len * 8);
+                pos += len;
+                if (id == 1 || id == 2) {
+                    Assert.assertEquals(1, len);
+                    if (id == 1) profile = val;
+                    else level = val;
+                }
+                if (profile != -1 && level != -1) break;
+            }
+            return plToPair(getHashMapVal(PROFILE_MAP, profile), getHashMapVal(LEVEL_MAP, level));
+        }
+
+        private Pair<Integer, Integer> getProfileFromFrameHeader() { // parse uncompressed header
+            ParsableBitArray bitArray = new ParsableBitArray(mData, mOffset, mLimit);
+            bitArray.readBits(2); // frame marker
+            int profileLBit = bitArray.readBits(1);
+            int profileHBit = bitArray.readBits(1);
+            int profile = (profileHBit << 1) + profileLBit;
+            return plToPair(getHashMapVal(PROFILE_MAP, profile), -1);
+        }
+
+        @Override
+        public int getFrameType() {
+            return PICTURE_TYPE_UNKNOWN;
+        }
+
+        @Override
+        public Pair<Integer, Integer> getProfileLevel(boolean isCsd) {
+            return isCsd ? getProfileLevelFromCSD() : getProfileFromFrameHeader();
+        }
     }
 
     static class Av1Parser extends ParserBase {
@@ -578,6 +923,48 @@
             return frameHeader;
         }
 
+        // parse av1 codec configuration record
+        private Pair<Integer, Integer> getProfileLevelFromCSD() {
+            int profile = -1;
+            ParsableBitArray bitArray = new ParsableBitArray(mData, mOffset, mLimit);
+            Assert.assertEquals(1, bitArray.readBits(1));  // marker
+            Assert.assertEquals(1, bitArray.readBits(7));  // version
+            int seqProfile = bitArray.readBits(3);
+            int seqLevelIdx0 = bitArray.readBits(5);
+            bitArray.readBits(1);  // seqTier0
+            int highBitDepth = bitArray.readBits(1);
+            bitArray.readBits(1);  // is input 12 bit
+            if (seqProfile == 0) {
+                profile = highBitDepth == 0 ? AV1ProfileMain8 : AV1ProfileMain10;
+            }
+
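+            // MediaCodecInfo AV1 level constants are one-bit flags starting at AV1Level2 for
+            // seq_level_idx 0, so shifting AV1Level2 by seq_level_idx gives the level constant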
+            int level = AV1Level2 << seqLevelIdx0;
+            return plToPair(profile, level);
+        }
+
+        // parse av1 sequence header
+        private Pair<Integer, Integer> getProfileLevelFromSeqHeader() {
+            for (int pos = mOffset; pos < mLimit; ) {
+                ObuInfo obuDetails = parseObuHeader(mData, pos, mLimit);
+                ObuParsableBitArray bitArray =
+                        new ObuParsableBitArray(mData, pos + obuDetails.getObuDataOffset(),
+                                pos + obuDetails.getTotalObuSize());
+                if (obuDetails.mObuType == OBU_SEQUENCE_HEADER) {
+                    int profile = -1;
+                    parseSequenceHeader(bitArray);
+                    if (mSeqHeader.seqProfile == 0) {
+                        profile = mSeqHeader.enableHighBitDepth == 0 ? AV1ProfileMain8 :
+                                AV1ProfileMain10;
+                    }
+
+                    int level = AV1Level2 << mSeqHeader.seqLevelIdx[0];
+                    return plToPair(profile, level);
+                }
+                pos += obuDetails.getTotalObuSize();
+            }
+            return null;
+        }
+
         @Override
         public int getFrameType() {
             ArrayList<FrameHeaderObu> headers = new ArrayList();
@@ -609,26 +996,68 @@
             }
             return PICTURE_TYPE_UNKNOWN;
         }
+
+        @Override
+        public Pair<Integer, Integer> getProfileLevel(boolean isCsd) {
+            return isCsd ? getProfileLevelFromCSD() : getProfileLevelFromSeqHeader();
+        }
+    }
+
+    static class AacParser extends ParserBase {
+        @Override
+        public int getFrameType() {
+            return PICTURE_TYPE_UNKNOWN;
+        }
+
+        @Override
+        public Pair<Integer, Integer> getProfileLevel(@SuppressWarnings("unused") boolean isCsd) {
+            // parse AudioSpecificConfig() of ISO 14496 Part 3
+            ParsableBitArray bitArray = new ParsableBitArray(mData, mOffset, mLimit);
+            int audioObjectType = bitArray.readBits(5);
+            if (audioObjectType == 31) {
+                audioObjectType = 32 + bitArray.readBits(6); // audio object type ext
+            }
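+            // MediaCodecInfo AAC profile constants equal the audio object type values, so the
+            // parsed audioObjectType is returned directly as the profile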
+            return plToPair(audioObjectType, -1);
+        }
     }
 
     public static ParserBase getParserObject(String mediaType) {
         switch (mediaType) {
+            case MediaFormat.MIMETYPE_VIDEO_MPEG4:
+                return new Mpeg4Parser();
+            case MediaFormat.MIMETYPE_VIDEO_H263:
+                return new H263Parser();
             case MediaFormat.MIMETYPE_VIDEO_AVC:
                 return new AvcParser();
             case MediaFormat.MIMETYPE_VIDEO_HEVC:
                 return new HevcParser();
             case MediaFormat.MIMETYPE_VIDEO_AV1:
                 return new Av1Parser();
+            case MediaFormat.MIMETYPE_VIDEO_VP9:
+                return new Vp9Parser();
+            case MediaFormat.MIMETYPE_AUDIO_AAC:
+                return new AacParser();
         }
         return null;
     }
 
     public static int getFrameTypeFromBitStream(ByteBuffer buf, MediaCodec.BufferInfo info,
             ParserBase o) {
+        if (o == null) return PICTURE_TYPE_UNKNOWN;
         byte[] dataArray = new byte[info.size];
         buf.position(info.offset);
         buf.get(dataArray);
         o.set(dataArray, 0, info.size);
         return o.getFrameType();
     }
+
+    public static Pair<Integer, Integer> getProfileLevelFromBitStream(ByteBuffer buf,
+            MediaCodec.BufferInfo info, ParserBase o) {
+        if (o == null) return null;
+        byte[] dataArray = new byte[info.size];
+        buf.position(info.offset);
+        buf.get(dataArray);
+        o.set(dataArray, 0, info.size);
+        return o.getProfileLevel((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0);
+    }
 }
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecTestBase.java
index b1ccc46..2736823 100644
--- a/tests/media/common/src/android/mediav2/common/cts/CodecTestBase.java
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecTestBase.java
@@ -26,6 +26,11 @@
 import static org.junit.Assert.fail;
 
 import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.params.DynamicRangeProfiles;
 import android.hardware.display.DisplayManager;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
@@ -115,6 +120,7 @@
             SystemProperties.getInt("ro.vndk.version", Build.VERSION_CODES.CUR_DEVELOPMENT)
                     >= Build.VERSION_CODES.TIRAMISU;
     public static final boolean IS_HDR_EDITING_SUPPORTED;
+    public static final boolean IS_HDR_CAPTURE_SUPPORTED;
     private static final String LOG_TAG = CodecTestBase.class.getSimpleName();
 
     public static final ArrayList<String> HDR_INFO_IN_BITSTREAM_CODECS = new ArrayList<>();
@@ -280,6 +286,7 @@
     static {
         MEDIA_CODEC_LIST_ALL = new MediaCodecList(MediaCodecList.ALL_CODECS);
         MEDIA_CODEC_LIST_REGULAR = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
+        IS_HDR_CAPTURE_SUPPORTED = isHDRCaptureSupported();
         IS_HDR_EDITING_SUPPORTED = isHDREditingSupported();
         CODEC_SEL_KEY_MEDIA_TYPE_MAP.put("vp8", MediaFormat.MIMETYPE_VIDEO_VP8);
         CODEC_SEL_KEY_MEDIA_TYPE_MAP.put("vp9", MediaFormat.MIMETYPE_VIDEO_VP9);
@@ -455,6 +462,31 @@
         return isSupported;
     }
 
+    public static boolean isHDRCaptureSupported() {
+        // If the device supports 10-bit HDR capture, HLG10 must be among the supported profiles
+        if (!MediaUtils.hasCamera()) return false;
+        CameraManager cm = CONTEXT.getSystemService(CameraManager.class);
+        try {
+            String[] cameraIds = cm.getCameraIdList();
+            for (String id : cameraIds) {
+                CameraCharacteristics ch = cm.getCameraCharacteristics(id);
+                int[] caps = ch.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+                if (IntStream.of(caps).anyMatch(x -> x
+                        == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
+                    Set<Long> profiles =
+                            ch.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES)
+                                    .getSupportedProfiles();
+                    if (profiles.contains(DynamicRangeProfiles.HLG10)) return true;
+                }
+            }
+        } catch (CameraAccessException e) {
+            Log.e(LOG_TAG, "encountered " + e.getMessage()
+                    + "; treating hdr capture as supported so the failure gets attention");
+            return true;
+        }
+        return false;
+    }
+
     public static boolean isHDREditingSupported() {
         for (MediaCodecInfo codecInfo : MEDIA_CODEC_LIST_REGULAR.getCodecInfos()) {
             if (!codecInfo.isEncoder()) {
@@ -508,7 +540,7 @@
     public static boolean canDisplaySupportHDRContent() {
         DisplayManager displayManager = CONTEXT.getSystemService(DisplayManager.class);
         return displayManager.getDisplay(Display.DEFAULT_DISPLAY).getHdrCapabilities()
-                .getSupportedHdrTypes().length != 0;
+                .getSupportedHdrTypes().length > 0;
     }
 
     public static boolean areFormatsSupported(String name, String mediaType,
diff --git a/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java b/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java
new file mode 100644
index 0000000..1d81c07b
--- /dev/null
+++ b/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java
@@ -0,0 +1,563 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.common.cts;
+
+import static android.media.MediaCodecInfo.CodecProfileLevel.*;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.util.Log;
+import android.util.Pair;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Wrapper class for testing encoder support for profile and level
+ */
+public class EncoderProfileLevelTestBase extends CodecEncoderTestBase {
+    private static final String LOG_TAG = EncoderProfileLevelTestBase.class.getSimpleName();
+
+    private static int divUp(int num, int den) {
+        return (num + den - 1) / den;
+    }
+
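+    /**
+     * Returns the lowest level of mediaType whose limits accommodate the given width, height,
+     * frame rate and bitrate (the highest defined level if none do), or -1 for media types
+     * without level handling.
+     */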
+    public static int getMinLevel(String mediaType, int width, int height, int frameRate,
+            int bitrate, int profile) {
+        switch (mediaType) {
+            case MediaFormat.MIMETYPE_VIDEO_AVC:
+                return getMinLevelAVC(width, height, frameRate, bitrate);
+            case MediaFormat.MIMETYPE_VIDEO_HEVC:
+                return getMinLevelHEVC(width, height, frameRate, bitrate);
+            case MediaFormat.MIMETYPE_VIDEO_H263:
+                return getMinLevelH263(width, height, frameRate, bitrate);
+            case MediaFormat.MIMETYPE_VIDEO_MPEG2:
+                return getMinLevelMPEG2(width, height, frameRate, bitrate);
+            case MediaFormat.MIMETYPE_VIDEO_MPEG4:
+                return getMinLevelMPEG4(width, height, frameRate, bitrate, profile);
+            case MediaFormat.MIMETYPE_VIDEO_VP9:
+                return getMinLevelVP9(width, height, frameRate, bitrate);
+            case MediaFormat.MIMETYPE_VIDEO_AV1:
+                return getMinLevelAV1(width, height, frameRate, bitrate);
+            default:
+                return -1;
+        }
+    }
+
+    private static int getMinLevelAVC(int width, int height, int frameRate, int bitrate) {
+        class LevelLimitAVC {
+            private LevelLimitAVC(int level, int mbsPerSec, long mbs, int bitrate) {
+                this.mLevel = level;
+                this.mMbsPerSec = mbsPerSec;
+                this.mMbs = mbs;
+                this.mBitrate = bitrate;
+            }
+
+            private final int mLevel;
+            private final int mMbsPerSec;
+            private final long mMbs;
+            private final int mBitrate;
+        }
+        LevelLimitAVC[] limitsAVC = {
+                new LevelLimitAVC(AVCLevel1, 1485, 99, 64000),
+                new LevelLimitAVC(AVCLevel1b, 1485, 99, 128000),
+                new LevelLimitAVC(AVCLevel11, 3000, 396, 192000),
+                new LevelLimitAVC(AVCLevel12, 6000, 396, 384000),
+                new LevelLimitAVC(AVCLevel13, 11880, 396, 768000),
+                new LevelLimitAVC(AVCLevel2, 11880, 396, 2000000),
+                new LevelLimitAVC(AVCLevel21, 19800, 792, 4000000),
+                new LevelLimitAVC(AVCLevel22, 20250, 1620, 4000000),
+                new LevelLimitAVC(AVCLevel3, 40500, 1620, 10000000),
+                new LevelLimitAVC(AVCLevel31, 108000, 3600, 14000000),
+                new LevelLimitAVC(AVCLevel32, 216000, 5120, 20000000),
+                new LevelLimitAVC(AVCLevel4, 245760, 8192, 20000000),
+                new LevelLimitAVC(AVCLevel41, 245760, 8192, 50000000),
+                new LevelLimitAVC(AVCLevel42, 522240, 8704, 50000000),
+                new LevelLimitAVC(AVCLevel5, 589824, 22080, 135000000),
+                new LevelLimitAVC(AVCLevel51, 983040, 36864, 240000000),
+                new LevelLimitAVC(AVCLevel52, 2073600, 36864, 240000000),
+                new LevelLimitAVC(AVCLevel6, 4177920, 139264, 240000000),
+                new LevelLimitAVC(AVCLevel61, 8355840, 139264, 480000000),
+                new LevelLimitAVC(AVCLevel62, 16711680, 139264, 800000000),
+        };
+        int blockSize = 16;
+        int mbs = divUp(width, blockSize) * divUp(height, blockSize);
+        float mbsPerSec = mbs * frameRate;
+        for (LevelLimitAVC levelLimitsAVC : limitsAVC) {
+            if (mbs <= levelLimitsAVC.mMbs && mbsPerSec <= levelLimitsAVC.mMbsPerSec
+                    && bitrate <= levelLimitsAVC.mBitrate) {
+                return levelLimitsAVC.mLevel;
+            }
+        }
+        // if none of the levels suffice, select the highest level
+        return AVCLevel62;
+    }
+
+    private static int getMinLevelHEVC(int width, int height, int frameRate, int bitrate) {
+        class LevelLimitHEVC {
+            private LevelLimitHEVC(int level, int frameRate, long samples, int bitrate) {
+                this.mLevel = level;
+                this.mFrameRate = frameRate;
+                this.mSamples = samples;
+                this.mBitrate = bitrate;
+            }
+
+            private final int mLevel;
+            private final int mFrameRate;
+            private final long mSamples;
+            private final int mBitrate;
+        }
+        LevelLimitHEVC[] limitsHEVC = {
+                new LevelLimitHEVC(HEVCMainTierLevel1, 15, 36864, 128000),
+                new LevelLimitHEVC(HEVCMainTierLevel2, 30, 122880, 1500000),
+                new LevelLimitHEVC(HEVCMainTierLevel21, 30, 245760, 3000000),
+                new LevelLimitHEVC(HEVCMainTierLevel3, 30, 552960, 6000000),
+                new LevelLimitHEVC(HEVCMainTierLevel31, 30, 983040, 10000000),
+                new LevelLimitHEVC(HEVCMainTierLevel4, 30, 2228224, 12000000),
+                new LevelLimitHEVC(HEVCHighTierLevel4, 30, 2228224, 30000000),
+                new LevelLimitHEVC(HEVCMainTierLevel41, 60, 2228224, 20000000),
+                new LevelLimitHEVC(HEVCHighTierLevel41, 60, 2228224, 50000000),
+                new LevelLimitHEVC(HEVCMainTierLevel5, 30, 8912896, 25000000),
+                new LevelLimitHEVC(HEVCHighTierLevel5, 30, 8912896, 100000000),
+                new LevelLimitHEVC(HEVCMainTierLevel51, 60, 8912896, 40000000),
+                new LevelLimitHEVC(HEVCHighTierLevel51, 60, 8912896, 160000000),
+                new LevelLimitHEVC(HEVCMainTierLevel52, 120, 8912896, 60000000),
+                new LevelLimitHEVC(HEVCHighTierLevel52, 120, 8912896, 240000000),
+                new LevelLimitHEVC(HEVCMainTierLevel6, 30, 35651584, 60000000),
+                new LevelLimitHEVC(HEVCHighTierLevel6, 30, 35651584, 240000000),
+                new LevelLimitHEVC(HEVCMainTierLevel61, 60, 35651584, 120000000),
+                new LevelLimitHEVC(HEVCHighTierLevel61, 60, 35651584, 480000000),
+                new LevelLimitHEVC(HEVCMainTierLevel62, 120, 35651584, 240000000),
+                new LevelLimitHEVC(HEVCHighTierLevel62, 120, 35651584, 800000000),
+        };
+        int blockSize = 8;
+        int blocks = divUp(width, blockSize) * divUp(height, blockSize);
+        int samples = blocks * blockSize * blockSize;
+        for (LevelLimitHEVC levelLimitsHEVC : limitsHEVC) {
+            if (samples <= levelLimitsHEVC.mSamples && frameRate <= levelLimitsHEVC.mFrameRate
+                    && bitrate <= levelLimitsHEVC.mBitrate) {
+                return levelLimitsHEVC.mLevel;
+            }
+        }
+        // if none of the levels suffice, select the highest level
+        return HEVCHighTierLevel62;
+    }
+
+    private static int getMinLevelH263(int width, int height, int frameRate, int bitrate) {
+        class LevelLimitH263 {
+            private LevelLimitH263(int level, long sampleRate, int width, int height, int frameRate,
+                    int bitrate) {
+                this.mLevel = level;
+                this.mSampleRate = sampleRate;
+                this.mWidth = width;
+                this.mHeight = height;
+                this.mFrameRate = frameRate;
+                this.mBitrate = bitrate;
+            }
+
+            private final int mLevel;
+            private final long mSampleRate;
+            private final int mWidth;
+            private final int mHeight;
+            private final int mFrameRate;
+            private final int mBitrate;
+        }
+        LevelLimitH263[] limitsH263 = {
+                new LevelLimitH263(H263Level10, 380160, 176, 144, 15, 64000),
+                new LevelLimitH263(H263Level45, 380160, 176, 144, 15, 128000),
+                new LevelLimitH263(H263Level20, 1520640, 352, 288, 30, 128000),
+                new LevelLimitH263(H263Level30, 3041280, 352, 288, 30, 384000),
+                new LevelLimitH263(H263Level40, 3041280, 352, 288, 30, 2048000),
+                new LevelLimitH263(H263Level50, 5068800, 352, 288, 60, 4096000),
+                new LevelLimitH263(H263Level60, 10368000, 720, 288, 60, 8192000),
+                new LevelLimitH263(H263Level70, 20736000, 720, 576, 60, 16384000),
+        };
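+        // H263 levels additionally cap picture width and height directly, on top of the
+        // luma sample rate, frame rate and bitrate limits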
+        int blockSize = 16;
+        int mbs = divUp(width, blockSize) * divUp(height, blockSize);
+        int size = mbs * blockSize * blockSize;
+        int sampleRate = size * frameRate;
+        for (LevelLimitH263 levelLimitsH263 : limitsH263) {
+            if (sampleRate <= levelLimitsH263.mSampleRate && height <= levelLimitsH263.mHeight
+                    && width <= levelLimitsH263.mWidth && frameRate <= levelLimitsH263.mFrameRate
+                    && bitrate <= levelLimitsH263.mBitrate) {
+                return levelLimitsH263.mLevel;
+            }
+        }
+        // if none of the levels suffice, select the highest level
+        return H263Level70;
+    }
+
+    private static int getMinLevelVP9(int width, int height, int frameRate, int bitrate) {
+        class LevelLimitVP9 {
+            private LevelLimitVP9(int level, long sampleRate, int size, int maxWH, int bitrate) {
+                this.mLevel = level;
+                this.mSampleRate = sampleRate;
+                this.mSize = size;
+                this.mMaxWH = maxWH;
+                this.mBitrate = bitrate;
+            }
+
+            private final int mLevel;
+            private final long mSampleRate;
+            private final int mSize;
+            private final int mMaxWH;
+            private final int mBitrate;
+        }
+        LevelLimitVP9[] limitsVP9 = {
+                new LevelLimitVP9(VP9Level1, 829440, 36864, 512, 200000),
+                new LevelLimitVP9(VP9Level11, 2764800, 73728, 768, 800000),
+                new LevelLimitVP9(VP9Level2, 4608000, 122880, 960, 1800000),
+                new LevelLimitVP9(VP9Level21, 9216000, 245760, 1344, 3600000),
+                new LevelLimitVP9(VP9Level3, 20736000, 552960, 2048, 7200000),
+                new LevelLimitVP9(VP9Level31, 36864000, 983040, 2752, 12000000),
+                new LevelLimitVP9(VP9Level4, 83558400, 2228224, 4160, 18000000),
+                new LevelLimitVP9(VP9Level41, 160432128, 2228224, 4160, 30000000),
+                new LevelLimitVP9(VP9Level5, 311951360, 8912896, 8384, 60000000),
+                new LevelLimitVP9(VP9Level51, 588251136, 8912896, 8384, 120000000),
+                new LevelLimitVP9(VP9Level52, 1176502272, 8912896, 8384, 180000000),
+                new LevelLimitVP9(VP9Level6, 1176502272, 35651584, 16832, 180000000),
+                new LevelLimitVP9(VP9Level61, 2353004544L, 35651584, 16832, 240000000),
+                new LevelLimitVP9(VP9Level62, 4706009088L, 35651584, 16832, 480000000),
+        };
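+        // VP9 levels bound the luma sample rate, the picture size and the largest frame
+        // dimension (mMaxWH), along with the bitrate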
+        int blockSize = 8;
+        int blocks = divUp(width, blockSize) * divUp(height, blockSize);
+        int size = blocks * blockSize * blockSize;
+        int sampleRate = size * frameRate;
+        int maxWH = Math.max(width, height);
+        for (LevelLimitVP9 levelLimitsVP9 : limitsVP9) {
+            if (sampleRate <= levelLimitsVP9.mSampleRate && size <= levelLimitsVP9.mSize
+                    && maxWH <= levelLimitsVP9.mMaxWH && bitrate <= levelLimitsVP9.mBitrate) {
+                return levelLimitsVP9.mLevel;
+            }
+        }
+        // if none of the levels suffice, select the highest level
+        return VP9Level62;
+    }
+
+    private static int getMinLevelMPEG2(int width, int height, int frameRate, int bitrate) {
+        class LevelLimitMPEG2 {
+            private LevelLimitMPEG2(int level, long sampleRate, int width, int height,
+                    int frameRate, int bitrate) {
+                this.mLevel = level;
+                this.mSampleRate = sampleRate;
+                this.mWidth = width;
+                this.mHeight = height;
+                this.mFrameRate = frameRate;
+                this.mBitrate = bitrate;
+            }
+
+            private final int mLevel;
+            private final long mSampleRate;
+            private final int mWidth;
+            private final int mHeight;
+            private final int mFrameRate;
+            private final int mBitrate;
+        }
+        // main profile limits, higher profiles will also support selected level
+        LevelLimitMPEG2[] limitsMPEG2 = {
+                new LevelLimitMPEG2(MPEG2LevelLL, 3041280, 352, 288, 30, 4000000),
+                new LevelLimitMPEG2(MPEG2LevelML, 10368000, 720, 576, 30, 15000000),
+                new LevelLimitMPEG2(MPEG2LevelH14, 47001600, 1440, 1088, 60, 60000000),
+                new LevelLimitMPEG2(MPEG2LevelHL, 62668800, 1920, 1088, 60, 80000000),
+                new LevelLimitMPEG2(MPEG2LevelHP, 125337600, 1920, 1088, 60, 80000000),
+        };
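+        // note: MPEG2LevelHP doubles the allowed luma sample rate of MPEG2LevelHL while
+        // keeping the same dimension and bitrate caps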
+        int blockSize = 16;
+        int mbs = divUp(width, blockSize) * divUp(height, blockSize);
+        int size = mbs * blockSize * blockSize;
+        int sampleRate = size * frameRate;
+        for (LevelLimitMPEG2 levelLimitsMPEG2 : limitsMPEG2) {
+            if (sampleRate <= levelLimitsMPEG2.mSampleRate && width <= levelLimitsMPEG2.mWidth
+                    && height <= levelLimitsMPEG2.mHeight
+                    && frameRate <= levelLimitsMPEG2.mFrameRate
+                    && bitrate <= levelLimitsMPEG2.mBitrate) {
+                return levelLimitsMPEG2.mLevel;
+            }
+        }
+        // if none of the levels suffice, select the highest level
+        return MPEG2LevelHP;
+    }
+
+    private static int getMinLevelMPEG4(int width, int height, int frameRate, int bitrate,
+            int profile) {
+        class LevelLimitMPEG4 {
+            private LevelLimitMPEG4(int profile, int level, long sampleRate, int width, int height,
+                    int frameRate, int bitrate) {
+                this.mProfile = profile;
+                this.mLevel = level;
+                this.mSampleRate = sampleRate;
+                this.mWidth = width;
+                this.mHeight = height;
+                this.mFrameRate = frameRate;
+                this.mBitrate = bitrate;
+            }
+
+            private final int mProfile;
+            private final int mLevel;
+            private final long mSampleRate;
+            private final int mWidth;
+            private final int mHeight;
+            private final int mFrameRate;
+            private final int mBitrate;
+        }
+        // simple and advanced simple profile limits; higher profiles will also support the
+        // selected level
+        LevelLimitMPEG4[] limitsMPEG4 = {
+                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level0, 380160, 176, 144, 15, 64000),
+                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level1, 380160, 176, 144, 30, 64000),
+                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level0b, 380160, 176, 144, 15, 128000),
+                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level2, 1520640, 352, 288, 30, 128000),
+                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level3, 3041280, 352, 288, 30, 384000),
+                new LevelLimitMPEG4(
+                        MPEG4ProfileSimple, MPEG4Level4a, 9216000, 640, 480, 30, 4000000),
+                new LevelLimitMPEG4(
+                        MPEG4ProfileSimple, MPEG4Level5, 10368000, 720, 576, 30, 8000000),
+                new LevelLimitMPEG4(
+                        MPEG4ProfileSimple, MPEG4Level6, 27648000, 1280, 720, 30, 12000000),
+                new LevelLimitMPEG4(
+                        MPEG4ProfileAdvancedSimple, MPEG4Level1, 760320, 176, 144, 30, 128000),
+                new LevelLimitMPEG4(
+                        MPEG4ProfileAdvancedSimple, MPEG4Level2, 1520640, 352, 288, 30, 384000),
+                new LevelLimitMPEG4(
+                        MPEG4ProfileAdvancedSimple, MPEG4Level3, 3041280, 352, 288, 30, 768000),
+                new LevelLimitMPEG4(
+                        MPEG4ProfileAdvancedSimple, MPEG4Level3b, 3041280, 352, 288, 30, 1500000),
+                new LevelLimitMPEG4(
+                        MPEG4ProfileAdvancedSimple, MPEG4Level4, 6082560, 704, 576, 30, 3000000),
+                new LevelLimitMPEG4(
+                        MPEG4ProfileAdvancedSimple, MPEG4Level5, 12441600, 720, 576, 30, 8000000),
+        };
+        int blockSize = 16;
+        int mbs = divUp(width, blockSize) * divUp(height, blockSize);
+        int size = mbs * blockSize * blockSize;
+        int sampleRate = size * frameRate;
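+        // only simple and advanced simple profile rows are tabulated above; reject any other
+        // profile instead of silently returning an unrelated level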
+        if (profile != MPEG4ProfileAdvancedSimple && profile != MPEG4ProfileSimple) {
+            throw new RuntimeException("Unrecognized profile " + profile + " for "
+                    + MediaFormat.MIMETYPE_VIDEO_MPEG4);
+        }
+        for (LevelLimitMPEG4 levelLimitsMPEG4 : limitsMPEG4) {
+            if (profile == levelLimitsMPEG4.mProfile && sampleRate <= levelLimitsMPEG4.mSampleRate
+                    && width <= levelLimitsMPEG4.mWidth && height <= levelLimitsMPEG4.mHeight
+                    && frameRate <= levelLimitsMPEG4.mFrameRate
+                    && bitrate <= levelLimitsMPEG4.mBitrate) {
+                return levelLimitsMPEG4.mLevel;
+            }
+        }
+        // if none of the levels suffice, select the highest level
+        return MPEG4Level6;
+    }
+
+    private static int getMinLevelAV1(int width, int height, int frameRate, int bitrate) {
+        class LevelLimitAV1 {
+            private LevelLimitAV1(int level, int size, int width, int height, long sampleRate,
+                    int bitrate) {
+                this.mLevel = level;
+                this.mSize = size;
+                this.mWidth = width;
+                this.mHeight = height;
+                this.mSampleRate = sampleRate;
+                this.mBitrate = bitrate;
+            }
+
+            private final int mLevel;
+            private final int mSize;
+            private final int mWidth;
+            private final int mHeight;
+            private final long mSampleRate;
+            private final int mBitrate;
+        }
+        // bitrate limits are taken from the main profile; the high profile also supports them
+        LevelLimitAV1[] limitsAV1 = {
+                new LevelLimitAV1(AV1Level2, 147456, 2048, 1152, 4423680, 1500000),
+                new LevelLimitAV1(AV1Level21, 278784, 2816, 1584, 8363520, 3000000),
+                new LevelLimitAV1(AV1Level3, 665856, 4352, 2448, 19975680, 6000000),
+                new LevelLimitAV1(AV1Level31, 1065024, 5504, 3096, 31950720, 10000000),
+                new LevelLimitAV1(AV1Level4, 2359296, 6144, 3456, 70778880, 12000000),
+                new LevelLimitAV1(AV1Level41, 2359296, 6144, 3456, 141557760, 20000000),
+                new LevelLimitAV1(AV1Level5, 8912896, 8192, 4352, 267386880, 30000000),
+                new LevelLimitAV1(AV1Level51, 8912896, 8192, 4352, 534773760, 40000000),
+                new LevelLimitAV1(AV1Level52, 8912896, 8192, 4352, 1069547520, 60000000),
+                new LevelLimitAV1(AV1Level53, 8912896, 8192, 4352, 1069547520, 60000000),
+                new LevelLimitAV1(AV1Level6, 35651584, 16384, 8704, 1069547520, 60000000),
+                new LevelLimitAV1(AV1Level61, 35651584, 16384, 8704, 2139095040, 100000000),
+                new LevelLimitAV1(AV1Level62, 35651584, 16384, 8704, 4278190080L, 160000000),
+                new LevelLimitAV1(AV1Level63, 35651584, 16384, 8704, 4278190080L, 160000000),
+        };
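+        // AV1 levels bound the picture size, the maximum width/height and the display luma
+        // sample rate; the bitrates above are main profile limits (see note above)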
+        int blockSize = 8;
+        int blocks = divUp(width, blockSize) * divUp(height, blockSize);
+        int size = blocks * blockSize * blockSize;
+        int sampleRate = size * frameRate;
+        for (LevelLimitAV1 levelLimitsAV1 : limitsAV1) {
+            if (size <= levelLimitsAV1.mSize && width <= levelLimitsAV1.mWidth
+                    && height <= levelLimitsAV1.mHeight && sampleRate <= levelLimitsAV1.mSampleRate
+                    && bitrate <= levelLimitsAV1.mBitrate) {
+                return levelLimitsAV1.mLevel;
+            }
+        }
+        // if none of the levels suffice, select the highest defined level
+        return AV1Level73;
+    }
+
+    protected BitStreamUtils.ParserBase mParser;
+    protected Pair<Integer, Integer> mProfileLevel;
+    protected boolean mGotCsd;
+
+    public EncoderProfileLevelTestBase(String encoder, String mediaType,
+            EncoderConfigParams[] encCfgParams, String allTestParams) {
+        super(encoder, mediaType, encCfgParams, allTestParams);
+    }
+
+    @Override
+    protected void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) {
+        super.resetContext(isAsync, signalEOSWithLastFrame);
+        mParser = BitStreamUtils.getParserObject(mMediaType);
+        mProfileLevel = null;
+        mGotCsd = false;
+    }
+
+    @Override
+    protected void dequeueOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+        if (info.size > 0 && mProfileLevel == null) {
+            if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+                ByteBuffer buf = mCodec.getOutputBuffer(bufferIndex);
+                mProfileLevel = BitStreamUtils.getProfileLevelFromBitStream(buf, info, mParser);
+                mGotCsd = true;
+            } else {
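+                // VP9 and H263 streams typically carry no separate codec-config buffer, so
+                // parse profile/level from the header of the first frame instead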
+                if ((mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_VP9) || mMediaType.equals(
+                        MediaFormat.MIMETYPE_VIDEO_H263)) && mOutputCount == 0) {
+                    ByteBuffer buf = mCodec.getOutputBuffer(bufferIndex);
+                    mProfileLevel = BitStreamUtils.getProfileLevelFromBitStream(buf, info, mParser);
+                }
+            }
+        }
+        super.dequeueOutput(bufferIndex, info);
+    }
+
+    private int getProfile(MediaFormat format, String msg) {
+        // Query output profile. KEY_PROFILE gets precedence over KEY_AAC_PROFILE
+        int aacProfile = format.getInteger(MediaFormat.KEY_AAC_PROFILE, -1);
+        int profile = format.getInteger(MediaFormat.KEY_PROFILE, aacProfile);
+        if (profile != -1) {
+            return profile;
+        } else {
+            fail(msg + "profile key not present in format " + format + mTestConfig + mTestEnv);
+        }
+        return -1;
+    }
+
+    private int getLevel(MediaFormat format, String msg) {
+        assertTrue(msg + "level not present in format " + format + mTestConfig + mTestEnv,
+                format.containsKey(MediaFormat.KEY_LEVEL));
+        return format.getInteger(MediaFormat.KEY_LEVEL);
+    }
+
+    protected void validateProfile(int exp, int got, String msg) {
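+        // for AVC, a constrained profile is accepted interchangeably with its unconstrained
+        // counterpart (baseline/constrained baseline, high/constrained high)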
+        if (mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
+            if (exp == AVCProfileBaseline || exp == AVCProfileConstrainedBaseline) {
+                assertTrue(String.format(msg + "Profile information mismatch, Expected %d, Got %d ",
+                                exp, got) + mTestConfig + mTestEnv,
+                        (got == AVCProfileBaseline || got == AVCProfileConstrainedBaseline));
+                return;
+            } else if (exp == AVCProfileHigh || exp == AVCProfileConstrainedHigh) {
+                assertTrue(String.format(msg + "Profile information mismatch, Expected %d, Got %d ",
+                                exp, got) + mTestConfig + mTestEnv,
+                        (got == AVCProfileHigh || got == AVCProfileConstrainedHigh));
+                return;
+            }
+        }
+        assertEquals(String.format(msg + "Profile information mismatch, Expected %d, Got %d ",
+                exp, got) + mTestConfig + mTestEnv, exp, got);
+    }
+
+    protected void validateLevel(int exp, int got, String msg) {
+        assertEquals(String.format(msg + "Level information mismatch, Expected %d, Got %d ",
+                exp, got) + mTestConfig + mTestEnv, exp, got);
+    }
+
+    protected void validateMinLevel(int min, int got, String msg) {
+        assertTrue(String.format(msg + "Level information unexpected, Expected at least %d,"
+                + " Got %d ", min, got) + mTestConfig + mTestEnv, min <= got);
+    }
+
+    protected void validateBitStreamForProfileAndLevel(int cfgProfile, int cfgLevel) {
+        if (mProfileLevel != null) {
+            validateProfile(cfgProfile, mProfileLevel.first, "Validating profile of bitstream : ");
+            if (mProfileLevel.second != -1) {
+                validateMinLevel(cfgLevel, mProfileLevel.second,
+                        "Validating level of bitstream : ");
+            }
+        }
+    }
+
+    protected void validateFormatForProfileAndLevelWRTBitstream(MediaFormat format, String msg) {
+        if (mProfileLevel != null) {
+            validateProfile(mProfileLevel.first, getProfile(format, msg), msg);
+            if (mProfileLevel.second != -1) {
+                validateLevel(mProfileLevel.second, getLevel(format, msg), msg);
+            }
+        }
+    }
+
+    protected void validateFormatForProfileAndLevelWRTCfg(MediaFormat format, String msg) {
+        validateProfile(mActiveEncCfg.mProfile, getProfile(format, msg), msg);
+        if (mActiveEncCfg.mLevel != -1) {
+            validateMinLevel(mActiveEncCfg.mLevel, getLevel(format, msg), msg);
+        }
+    }
+
+    protected void validateFormatForProfileAndLevel(MediaFormat format, String msg) {
+        validateFormatForProfileAndLevelWRTBitstream(format, msg + " wrt bitstream : ");
+        validateFormatForProfileAndLevelWRTCfg(format, msg + " wrt cfg : ");
+    }
+
+    protected MediaFormat validateProfileAndLevel() throws IOException {
+        // check if bitstream is in accordance with configured profile and level info.
+        if (mProfileLevel != null) {
+            validateBitStreamForProfileAndLevel(mActiveEncCfg.mProfile, mActiveEncCfg.mLevel);
+        }
+
+        // check if output format is in accordance with configured profile and level info.
+        if (mCodecName.toUpperCase().startsWith("OMX")) {
+            Log.i(LOG_TAG, "OMX components do not present profile/level in the output format");
+        } else {
+            validateFormatForProfileAndLevel(mOutFormat, "Testing output format : ");
+        }
+
+        // check if extracted output profile and level information are in accordance with
+        // configured profile and level info
+        if (mMuxOutput && !mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_H263)) {
+            // H263 does not explicitly signal header information such as profile and level in
+            // its bitstream high level syntax; that information is conveyed out of band, e.g.
+            // through the RTP header, so the extracted format is not validated for H263
+            MediaExtractor extractor = new MediaExtractor();
+            extractor.setDataSource(mMuxedOutputFile);
+            assertEquals("Should be only 1 track \n" + mTestConfig + mTestEnv, 1,
+                    extractor.getTrackCount());
+            MediaFormat trackFormat = extractor.getTrackFormat(0);
+            extractor.release();
+            if (mGotCsd || (trackFormat.containsKey(MediaFormat.KEY_PROFILE)
+                    || trackFormat.containsKey(MediaFormat.KEY_LEVEL))) {
+                validateFormatForProfileAndLevel(trackFormat, "Testing extractor format :- ");
+            }
+            return trackFormat;
+        }
+        return null;
+    }
+}
diff --git a/tests/media/src/android/mediav2/cts/CodecInfoTest.java b/tests/media/src/android/mediav2/cts/CodecInfoTest.java
index 5896b23..d295a59 100644
--- a/tests/media/src/android/mediav2/cts/CodecInfoTest.java
+++ b/tests/media/src/android/mediav2/cts/CodecInfoTest.java
@@ -19,28 +19,30 @@
 import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_Format32bitABGR2101010;
 import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
 import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
 import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010;
 import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_HdrEditing;
-import static android.mediav2.common.cts.CodecTestBase.CONTEXT;
 import static android.mediav2.common.cts.CodecTestBase.FIRST_SDK_IS_AT_LEAST_T;
 import static android.mediav2.common.cts.CodecTestBase.IS_AT_LEAST_T;
+import static android.mediav2.common.cts.CodecTestBase.IS_HDR_CAPTURE_SUPPORTED;
 import static android.mediav2.common.cts.CodecTestBase.PROFILE_HDR10_MAP;
 import static android.mediav2.common.cts.CodecTestBase.PROFILE_HDR10_PLUS_MAP;
-import static android.mediav2.common.cts.CodecTestBase.PROFILE_HDR_MAP;
 import static android.mediav2.common.cts.CodecTestBase.VNDK_IS_AT_LEAST_T;
+import static android.mediav2.common.cts.CodecTestBase.canDisplaySupportHDRContent;
 import static android.mediav2.common.cts.CodecTestBase.isVendorCodec;
 import static android.mediav2.common.cts.CodecTestBase.selectCodecs;
 
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import android.hardware.display.DisplayManager;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecProfileLevel;
 import android.media.MediaCodecList;
 import android.mediav2.common.cts.CodecTestBase;
 import android.os.Build;
-import android.view.Display;
 
 import androidx.test.filters.SmallTest;
 
@@ -68,19 +70,11 @@
 @RunWith(Parameterized.class)
 public class CodecInfoTest {
     private static final String LOG_TAG = CodecInfoTest.class.getSimpleName();
-    private static final int[] DISPLAY_HDR_TYPES;
 
     public String mMediaType;
     public String mCodecName;
     public MediaCodecInfo mCodecInfo;
 
-    static {
-        DisplayManager displayManager = CONTEXT.getSystemService(DisplayManager.class);
-        DISPLAY_HDR_TYPES =
-                displayManager.getDisplay(Display.DEFAULT_DISPLAY).getHdrCapabilities()
-                        .getSupportedHdrTypes();
-    }
-
     public CodecInfoTest(String mediaType, String codecName, MediaCodecInfo codecInfo) {
         mMediaType = mediaType;
         mCodecName = codecName;
@@ -145,7 +139,7 @@
             // native level, separate the following to independent checks for HDR10 and HDR10+
             if (isHdr10Profile || isHdr10PlusProfile) {
                 assertTrue(mCodecInfo.getName() + " Advertises support for HDR10/HDR10+ profile " +
-                        pl.profile + " without any HDR display", DISPLAY_HDR_TYPES.length > 0);
+                        pl.profile + " without any HDR display", canDisplaySupportHDRContent());
             }
         }
     }
@@ -155,7 +149,8 @@
      * formats. The test only checks if the decoder/encoder is advertising the required color
      * format. It doesn't verify if it actually supports by decoding/encoding.
      */
-    @CddTest(requirements = {"5.1.7/C-1-2", "5.1.7/C-4-1", "5.12/C-6-5", "5.12/C-7-3"})
+    @CddTest(requirements = {"5.1.7/C-1-2", "5.1.7/C-1-3", "5.1.7/C-4-1", "5.12/C-6-5",
+            "5.12/C-7-1", "5.12/C-7-3"})
     @Test
     public void testColorFormatSupport() {
         Assume.assumeTrue("Test is applicable for video codecs", mMediaType.startsWith("video/"));
@@ -164,20 +159,54 @@
                 IntStream.of(caps.colorFormats)
                         .noneMatch(x -> x == COLOR_FormatYUV420Flexible));
 
-        // Encoders that support FEATURE_HdrEditing, must support P010 and ABGR2101010
-        // color format and at least one HDR profile
-        boolean hdrEditingSupported = caps.isFeatureSupported(FEATURE_HdrEditing);
-        if (mCodecInfo.isEncoder() && hdrEditingSupported) {
-            boolean abgr2101010Supported =
-                    IntStream.of(caps.colorFormats)
-                            .anyMatch(x -> x == COLOR_Format32bitABGR2101010);
-            boolean p010Supported =
-                    IntStream.of(caps.colorFormats).anyMatch(x -> x == COLOR_FormatYUVP010);
-            assertTrue(mCodecName + " supports FEATURE_HdrEditing, but does not support " +
-                    "COLOR_FormatABGR2101010 and COLOR_FormatYUVP010 color formats.",
-                    abgr2101010Supported && p010Supported);
-            assertTrue(mCodecName + " supports FEATURE_HdrEditing, but does not support any HDR " +
-                    "profiles.", CodecTestBase.doesCodecSupportHDRProfile(mCodecName, mMediaType));
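+        // verify at least one concrete (non-flexible) 8-bit YUV 4:2:0 planar or semi-planar
+        // color format is advertised, in addition to the flexible variant checked above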
+        assertFalse(mCodecInfo.getName()
+                        + " does not support at least one of planar or semi planar yuv 420 888",
+                IntStream.of(caps.colorFormats)
+                        .noneMatch(x -> x == COLOR_FormatYUV420PackedPlanar)
+                        && IntStream.of(caps.colorFormats)
+                        .noneMatch(x -> x == COLOR_FormatYUV420Planar)
+                        && IntStream.of(caps.colorFormats)
+                        .noneMatch(x -> x == COLOR_FormatYUV420PackedSemiPlanar)
+                        && IntStream.of(caps.colorFormats)
+                        .noneMatch(x -> x == COLOR_FormatYUV420SemiPlanar));
+
+        boolean canHandleHdr = CodecTestBase.doesCodecSupportHDRProfile(mCodecName, mMediaType);
+        if (mCodecInfo.isEncoder()) {
+            if (IS_HDR_CAPTURE_SUPPORTED && canHandleHdr) {
+                assertFalse(mCodecInfo.getName()
+                                + " supports HDR profile but does not support COLOR_FormatYUVP010",
+                        IntStream.of(caps.colorFormats).noneMatch(x -> x == COLOR_FormatYUVP010));
+            }
+
+            // Encoders that support FEATURE_HdrEditing, must support ABGR2101010 color format
+            // and at least one HDR profile
+            boolean hdrEditingSupported = caps.isFeatureSupported(FEATURE_HdrEditing);
+            if (hdrEditingSupported) {
+                boolean abgr2101010Supported = IntStream.of(caps.colorFormats)
+                        .anyMatch(x -> x == COLOR_Format32bitABGR2101010);
+                assertTrue(mCodecName + " supports FEATURE_HdrEditing, but does not support"
+                        + " COLOR_FormatABGR2101010 color formats.", abgr2101010Supported);
+                assertTrue(mCodecName + " supports FEATURE_HdrEditing, but does not support"
+                        + " any HDR profiles.", canHandleHdr);
+            }
+        } else {
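+            // decoders advertising an HDR profile are expected to expose COLOR_FormatYUVP010
+            // on devices launching with T or later that have an HDR capable display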
+            if (FIRST_SDK_IS_AT_LEAST_T && VNDK_IS_AT_LEAST_T && canHandleHdr
+                    && canDisplaySupportHDRContent()) {
+                if (MediaUtils.isTv()) {
+                    // Some TV devices support HDR10 display with VO instead of GPU. In this
+                    // case, skip checking P010 on TV devices.
+                    Assume.assumeFalse(mCodecInfo.getName()
+                                    + " supports HDR profile but does not support "
+                                    + "COLOR_FormatYUVP010. Skip checking on TV device",
+                            IntStream.of(caps.colorFormats)
+                                    .noneMatch(x -> x == COLOR_FormatYUVP010));
+                } else {
+                    assertFalse(mCodecInfo.getName()
+                                    + " supports HDR profile but does not support "
+                                    + "COLOR_FormatYUVP010",
+                            IntStream.of(caps.colorFormats)
+                                    .noneMatch(x -> x == COLOR_FormatYUVP010));
+                }
+            }
         }
 
         // COLOR_FormatSurface support is an existing requirement, but we did not
@@ -190,41 +219,6 @@
         }
     }
 
-    /** For devices launching with Android T or higher, if a codec supports an HDR profile and
-     * device supports HDR display, it must support COLOR_FormatYUVP010 as a video decoder output
-     * format. For TVs, this requirement is optional.
-     */
-    @CddTest(requirements = "5.12/C-6-5")
-    @Test
-    public void testP010SupportForHDRDisplay() {
-        Assume.assumeTrue("Test is applicable for video codecs", mMediaType.startsWith("video/"));
-        MediaCodecInfo.CodecCapabilities caps = mCodecInfo.getCapabilitiesForType(mMediaType);
-        int[] HdrProfileArray = PROFILE_HDR_MAP.get(mMediaType);
-        if (FIRST_SDK_IS_AT_LEAST_T && VNDK_IS_AT_LEAST_T
-                && HdrProfileArray != null && DISPLAY_HDR_TYPES.length > 0) {
-            for (CodecProfileLevel pl : caps.profileLevels) {
-                if (IntStream.of(HdrProfileArray).anyMatch(x -> x == pl.profile)) {
-                    if (MediaUtils.isTv()) {
-                        // Some TV devices support HDR10 display with VO instead of GPU. In this
-                        // case, skip checking P010 on TV devices.
-                        Assume.assumeFalse(mCodecInfo.getName() + " supports HDR profile "
-                                        + pl.profile + ","
-                                        + " but does not support COLOR_FormatYUVP010."
-                                        + " Skip checking on TV device",
-                                IntStream.of(caps.colorFormats)
-                                        .noneMatch(x -> x == COLOR_FormatYUVP010));
-                    } else {
-                        assertFalse(mCodecInfo.getName() + " supports HDR profile "
-                                        + pl.profile + "," +
-                                        " but does not support COLOR_FormatYUVP010",
-                                IntStream.of(caps.colorFormats)
-                                        .noneMatch(x -> x == COLOR_FormatYUVP010));
-                    }
-                }
-            }
-        }
-    }
-
     /**
      * For all the available encoders on the device, the test checks if their encoding
      * capabilities are in sync with the device's decoding capabilities.
diff --git a/tests/media/src/android/mediav2/cts/EncoderProfileLevelTest.java b/tests/media/src/android/mediav2/cts/EncoderProfileLevelTest.java
index 025f130..9a6b350 100644
--- a/tests/media/src/android/mediav2/cts/EncoderProfileLevelTest.java
+++ b/tests/media/src/android/mediav2/cts/EncoderProfileLevelTest.java
@@ -24,15 +24,12 @@
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
-import android.media.MediaExtractor;
 import android.media.MediaFormat;
-import android.media.MediaMuxer;
-import android.mediav2.common.cts.CodecEncoderTestBase;
 import android.mediav2.common.cts.EncoderConfigParams;
+import android.mediav2.common.cts.EncoderProfileLevelTestBase;
 import android.mediav2.common.cts.OutputManager;
 import android.util.Log;
 import android.util.Pair;
@@ -45,9 +42,7 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
-import java.io.File;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -59,23 +54,26 @@
  * in its codec capabilities. The test sets profile and level keys in media format and uses it
  * during encoder configuration. Upon successful configuration, frames are queued for encoding
  * (byte buffer mode) and the encoded output (bitstream) is expected to contain the same profile
- * and level information that was used during configure.
+ * that was used during configure. The level shall be at least the input configured level.
  * <p>
- * NOTE: The test configures profile, level information basing on standard guidelines, not
- * arbitrarily so encoders ARE expected to place these values in the bitstream as-is.
+ * NOTE: The test configures level information based on standard guidelines, not arbitrarily, so
+ * encoders are expected to maintain at least the configured level.
  * <p>
- * The test additionally checks if the output format returned by component contains same profile
- * and level information. Having output format contain this information is useful during muxing
+ * The test parses the bitstream (csd or frame header) to determine the profile and level
+ * information, which serves as the reference for further validation. The test checks that the
+ * output format returned by the component contains the same profile and level information as the
+ * bitstream. The encoder output is also muxed and then extracted; the extracted format is
+ * expected to contain the same profile and level information as the bitstream.
  * <p>
  * As per cdd, if a device contains an encoder capable of encoding a profile/level combination
  * then it should contain a decoder capable of decoding the same profile/level combination. This
  * is verified.
  * <p>
  * If device implementations support encoding in a media type, then as per cdd they are expected to
- * handle certain profile and level configurations. This is verified as well.
+ * handle certain profile and level configurations. This is verified.
  */
 @RunWith(Parameterized.class)
-public class EncoderProfileLevelTest extends CodecEncoderTestBase {
+public class EncoderProfileLevelTest extends EncoderProfileLevelTestBase {
     private static final String LOG_TAG = EncoderProfileLevelTest.class.getSimpleName();
     private static final HashMap<String, Pair<int[], Integer>> PROFILE_LEVEL_CDD = new HashMap<>();
 
@@ -85,16 +83,52 @@
         super(encoder, mediaType, encCfgParams, allTestParams);
     }
 
+    private static List<Object[]> prepareTestArgs(Object[] arg, int[] profiles, int colorFormat) {
+        List<Object[]> argsList = new ArrayList<>();
+        final int[] maxBFrames = {0, 2};
+        final String mediaType = (String) arg[0];
+        boolean isVideo = mediaType.startsWith("video/");
+        final int br = (int) arg[1];
+        final int param1 = (int) arg[2];
+        final int param2 = (int) arg[3];
+        final int fps = (int) arg[4];
+        if (isVideo) {
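+            // exercise each video config with and without B frames; only AVC and HEVC are
+            // tested with B frames, other media types skip the non-zero case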
+            for (int maxBframe : maxBFrames) {
+                if (maxBframe != 0) {
+                    if (!mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)
+                            && !mediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+                        continue;
+                    }
+                }
+                Object[] testArgs = new Object[3];
+                testArgs[0] = arg[0];
+                testArgs[1] = getVideoEncoderCfgParams(mediaType, br, param1, param2, fps,
+                        colorFormat, maxBframe, profiles);
+                testArgs[2] = String.format("%dkbps_%dx%d_%dfps_%s_%d-bframes", br / 1000, param1,
+                        param2, fps, colorFormatToString(colorFormat, -1), maxBframe);
+                argsList.add(testArgs);
+            }
+        } else {
+            Object[] testArgs = new Object[3];
+            testArgs[0] = arg[0];
+            testArgs[1] = getAudioEncoderCfgParams(mediaType, br, param1, param2, profiles);
+            testArgs[2] = String.format("%dkbps_%dkHz_%dch", br / 1000, param1 / 1000, param2);
+            argsList.add(testArgs);
+        }
+        return argsList;
+    }
+
     private static EncoderConfigParams[] getVideoEncoderCfgParams(String mediaType, int bitRate,
             int width, int height, int frameRate, int colorFormat, int maxBframe, int[] profiles) {
         ArrayList<EncoderConfigParams> cfgParams = new ArrayList<>();
         for (int profile : profiles) {
             int level = getMinLevel(mediaType, width, height, frameRate, bitRate, profile);
-            if (mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)
-                    && maxBframe != 0
-                    && (profile == AVCProfileBaseline
-                    || profile == AVCProfileConstrainedBaseline)) {
-                continue;
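+            // AVC baseline and constrained baseline profiles do not allow B frames, so skip
+            // those combinations when maxBframe is non-zero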
+            if (maxBframe != 0) {
+                if (mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC) && (
+                        profile == AVCProfileBaseline
+                                || profile == AVCProfileConstrainedBaseline)) {
+                    continue;
+                }
             }
             cfgParams.add(new EncoderConfigParams.Builder(mediaType)
                     .setBitRate(bitRate)
@@ -134,166 +168,133 @@
                 {MediaFormat.MIMETYPE_AUDIO_AAC, 64000, 48000, 1, -1},
                 {MediaFormat.MIMETYPE_AUDIO_AAC, 128000, 48000, 2, -1},
                 // Video - CodecMediaType, bit-rate, height, width, frame-rate
-                // TODO (b/151423508)
-                /*{MediaFormat.MIMETYPE_VIDEO_AVC, 64000, 176, 144, 15},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 128000, 176, 144, 15},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 192000, 352, 288, 7},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 384000, 352, 288, 15},*/
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 768000, 352, 288, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 2000000, 352, 288, 30},
-                // TODO (b/151423508)
-                /*{MediaFormat.MIMETYPE_VIDEO_AVC, 4000000, 352, 576, 25},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 4000000, 720, 576, 12},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 10000000, 720, 576, 25},*/
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 14000000, 1280, 720, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 20000000, 1280, 1024, 42},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 20000000, 2048, 1024, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 50000000, 2048, 1024, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 50000000, 2048, 1080, 60},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 135000000, 3672, 1536, 25},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 240000000, 4096, 2304, 25},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 240000000, 4096, 2304, 50},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 240000000, 8192, 4320, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 480000000, 8192, 4320, 60},
-                {MediaFormat.MIMETYPE_VIDEO_AVC, 800000000, 8192, 4320, 120},
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 64000, 128, 96, 30},  // AVCLevel1
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 128000, 176, 144, 15}, // AVCLevel1b
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 192000, 320, 240, 10},  // AVCLevel11
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 384000, 320, 240, 20},  // AVCLevel12
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 512000, 352, 240, 30},  // AVCLevel13
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 832000, 352, 288, 30},  // AVCLevel2
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 1000000, 352, 576, 25},  // AVCLevel21
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 1500000, 640, 480, 15},  // AVCLevel22
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 2000000, 720, 480, 30},  // AVCLevel3
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 3000000, 1280, 720, 30},  // AVCLevel31
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 6000000, 1280, 1024, 42},  // AVCLevel32
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 10000000, 1920, 1088, 30},  // AVCLevel4
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 25000000, 2048, 1024, 30},  // AVCLevel41
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 50000000, 2048, 1088, 60},  // AVCLevel42
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 60000000, 2560, 1920, 30},  // AVCLevel5
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 80000000, 4096, 2048, 30},  // AVCLevel51
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 120000000, 4096, 2160, 60},  // AVCLevel52
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 240000000, 8192, 4320, 30},  // AVCLevel6
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 480000000, 8192, 4320, 60},  // AVCLevel61
+                {MediaFormat.MIMETYPE_VIDEO_AVC, 800000000, 8192, 4320, 120},  // AVCLevel62
 
-                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 4000000, 352, 288, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 15000000, 720, 576, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 60000000, 1440, 1088, 60},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 80000000, 1920, 1088, 60},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 80000000, 1920, 1088, 60},
+                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 4000000, 352, 288, 30},  // MPEG2LevelLL
+                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 15000000, 720, 480, 30},  // MPEG2LevelML
+                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 60000000, 1440, 1088, 30},  // MPEG2LevelH14
+                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 80000000, 1920, 1088, 30},  // MPEG2LevelHL
+                {MediaFormat.MIMETYPE_VIDEO_MPEG2, 80000000, 1920, 1088, 60},  // MPEG2LevelHP
 
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 64000, 176, 144, 15},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 64000, 176, 144, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 128000, 176, 144, 15},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 128000, 352, 288, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 384000, 352, 288, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 4000000, 640, 480, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 8000000, 720, 576, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 12000000, 1280, 720, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 128000, 176, 144, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 384000, 352, 288, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 768000, 352, 288, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 1500000, 352, 288, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 3000000, 704, 576, 30},
-                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 8000000, 720, 576, 30},
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 200000, 256, 144, 15},  // VP9Level1
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 512000, 384, 192, 30},  // VP9Level11
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 1000000, 480, 256, 30},  // VP9Level2
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 1500000, 640, 384, 30},  // VP9Level21
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 1600000, 720, 480, 30},  // VP9Level3
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 4000000, 1280, 720, 30},  // VP9Level31
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 5000000, 1920, 1080, 30},  // VP9Level4
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 16000000, 2048, 1088, 60},  // VP9Level41
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 20000000, 3840, 2160, 30},  // VP9Level5
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 80000000, 4096, 2176, 60},  // VP9Level51
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 160000000, 4096, 2176, 120},  // VP9Level52
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 180000000, 8192, 4352, 30},  // VP9Level6
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 240000000, 8192, 4352, 60},  // VP9Level61
+                {MediaFormat.MIMETYPE_VIDEO_VP9, 480000000, 8192, 4352, 120},  // VP9Level62
 
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 200000, 256, 144, 15},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 8000000, 384, 192, 30},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 1800000, 480, 256, 30},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 3600000, 640, 384, 30},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 7200000, 1080, 512, 30},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 12000000, 1280, 768, 30},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 18000000, 2048, 1088, 30},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 30000000, 2048, 1088, 60},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 60000000, 4096, 2176, 30},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 120000000, 4096, 2176, 60},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 180000000, 4096, 2176, 120},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 180000000, 8192, 4352, 30},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 240000000, 8192, 4352, 60},
-                {MediaFormat.MIMETYPE_VIDEO_VP9, 480000000, 8192, 4352, 120},
+                {MediaFormat.MIMETYPE_VIDEO_H263, 64000, 176, 144, 15},  // H263Level10
+                {MediaFormat.MIMETYPE_VIDEO_H263, 128000, 176, 144, 15},  // H263Level45
+                {MediaFormat.MIMETYPE_VIDEO_H263, 128000, 352, 288, 15},  // H263Level20
+                {MediaFormat.MIMETYPE_VIDEO_H263, 384000, 352, 288, 30},  // H263Level30
+                {MediaFormat.MIMETYPE_VIDEO_H263, 2048000, 352, 288, 30},  // H263Level40
+                {MediaFormat.MIMETYPE_VIDEO_H263, 4096000, 352, 240, 60},  // H263Level50
+                {MediaFormat.MIMETYPE_VIDEO_H263, 8192000, 720, 240, 60},  // H263Level60
+                {MediaFormat.MIMETYPE_VIDEO_H263, 16384000, 720, 576, 50},  // H263Level70
 
-                {MediaFormat.MIMETYPE_VIDEO_H263, 64000, 176, 144, 15},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 128000, 176, 144, 15},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 128000, 176, 144, 30},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 128000, 352, 288, 15},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 384000, 352, 288, 30},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 2048000, 352, 288, 30},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 4096000, 352, 240, 60},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 4096000, 352, 288, 50},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 8192000, 720, 240, 60},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 8192000, 720, 288, 50},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 16384000, 720, 480, 60},
-                {MediaFormat.MIMETYPE_VIDEO_H263, 16384000, 720, 576, 50},
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 128000, 176, 144, 15},  // HEVCMainTierLevel1
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 512000, 352, 288, 30},  // HEVCMainTierLevel2
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 1000000, 640, 360, 30},  // HEVCMainTierLevel21
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 1000000, 512, 512, 30},  // HEVCMainTierLevel3
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 1600000, 720, 480, 30},  // HEVCMainTierLevel3
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 4000000, 1280, 720, 30},  // HEVCMainTierLevel31
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 5000000, 1920, 1080, 30},  // HEVCMainTierLevel4
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 16000000, 1920, 1080, 30},  // HEVCHighTierLevel4
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 20000000, 1920, 1080, 60},  // HEVCMainTierLevel41
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 30000000, 1920, 1080, 60},  // HEVCHighTierLevel41
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 20000000, 3840, 2160, 30},  // HEVCMainTierLevel5
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 50000000, 3840, 2160, 30},  // HEVCHighTierLevel5
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 40000000, 3840, 2160, 60},  // HEVCMainTierLevel51
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 80000000, 3840, 2160, 60},  // HEVCHighTierLevel51
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 50000000, 3840, 2160, 120}, // HEVCMainTierLevel52
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 100000000, 3840, 2160, 120}, //HEVCHighTierLevel52
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 50000000, 7680, 4320, 30},  // HEVCMainTierLevel6
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 80000000, 7680, 4320, 30},  // HEVCHighTierLevel6
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 100000000, 7680, 4320, 60}, // HEVCMainTierLevel61
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 240000000, 7680, 4320, 60}, // HEVCHighTierLevel61
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 200000000, 7680, 4320, 120}, //HEVCMainTierLevel62
+                {MediaFormat.MIMETYPE_VIDEO_HEVC, 360000000, 7680, 4320, 120}, //HEVCHighTierLevel62
 
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 128000, 176, 144, 15},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 1500000, 352, 288, 30},
-                // TODO (b/152576008) - Limit HEVC Encoder test to 512x512
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 3000000, 512, 512, 30},
-                //{MediaFormat.MIMETYPE_VIDEO_HEVC, 3000000, 640, 360, 30},
-                //{MediaFormat.MIMETYPE_VIDEO_HEVC, 6000000, 960, 540, 30},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 10000000, 1280, 720, 33},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 12000000, 2048, 1080, 30},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 20000000, 2048, 1080, 60},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 25000000, 4096, 2160, 30},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 40000000, 4096, 2160, 60},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 60000000, 4096, 2160, 120},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 60000000, 8192, 4320, 30},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 120000000, 8192, 4320, 60},
-                {MediaFormat.MIMETYPE_VIDEO_HEVC, 240000000, 8192, 4320, 120},
-
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 1500000, 426, 240, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 3000000, 640, 360, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 6000000, 854, 480, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 10000000, 1280, 720, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 12000000, 1920, 1080, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 20000000, 1920, 1080, 60},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 30000000, 3840, 2160, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 40000000, 3840, 2160, 60},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 60000000, 3840, 2160, 120},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 60000000, 7680, 4320, 30},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 100000000, 7680, 4320, 60},
-                {MediaFormat.MIMETYPE_VIDEO_AV1, 160000000, 7680, 4320, 120},
-
-                {MediaFormat.MIMETYPE_VIDEO_VP8, 512000, 176, 144, 20},
-                {MediaFormat.MIMETYPE_VIDEO_VP8, 512000, 480, 360, 20},
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 1500000, 426, 240, 30},  // AV1Level2
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 3000000, 640, 360, 30},  // AV1Level21
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 6000000, 854, 480, 30},  // AV1Level3
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 10000000, 1280, 720, 30},  // AV1Level31
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 12000000, 1920, 1080, 30},  // AV1Level4
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 20000000, 1920, 1080, 60},  // AV1Level41
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 30000000, 3840, 2160, 30},  // AV1Level5
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 40000000, 3840, 2160, 60},  // AV1Level51
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 60000000, 3840, 2160, 120},  // AV1Level52
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 60000000, 7680, 4320, 30},  // AV1Level6
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 100000000, 7680, 4320, 60},  // AV1Level61
+                {MediaFormat.MIMETYPE_VIDEO_AV1, 160000000, 7680, 4320, 120},  // AV1Level62
         };
         final List<Object[]> argsList = new ArrayList<>();
-        final int[] maxBFrames = {0, 2};
         for (Object[] arg : exhaustiveArgsList) {
             final String mediaType = (String) arg[0];
-            boolean isVideo = mediaType.startsWith("video/");
-            final int br = (int) arg[1];
-            final int param1 = (int) arg[2];
-            final int param2 = (int) arg[3];
-            final int fps = (int) arg[4];
-            Object[] testArgs = new Object[3];
-            testArgs[0] = arg[0];
-            if (isVideo) {
-                for (int maxBframe : maxBFrames) {
-                    if (maxBframe != 0) {
-                        if (!mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)
-                                && !mediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
-                            continue;
-                        }
-                    }
-                    testArgs[1] = getVideoEncoderCfgParams(mediaType, br, param1, param2, fps,
-                            COLOR_FormatYUV420Flexible, maxBframe,
-                            Objects.requireNonNull(PROFILE_SDR_MAP.get(mediaType)));
-                    testArgs[2] = String.format("%dkbps_%dx%d_%dfps_%s_%d-bframes", br / 1000,
-                            param1, param2, fps, colorFormatToString(COLOR_FormatYUV420Flexible, 8),
-                            maxBframe);
-                    argsList.add(testArgs);
-                }
-            } else {
-                testArgs[1] = getAudioEncoderCfgParams(mediaType, br, param1, param2,
-                        Objects.requireNonNull(PROFILE_SDR_MAP.get(mediaType)));
-                testArgs[2] = String.format("%dkbps_%dkHz_%dch", br / 1000, param1 / 1000, param2);
-                argsList.add(testArgs);
-            }
-
+            argsList.addAll(prepareTestArgs(arg,
+                    Objects.requireNonNull(PROFILE_SDR_MAP.get(mediaType)),
+                    COLOR_FormatYUV420Flexible));
             // P010 support was added in Android T, hence limit the following tests to Android
             // T and above
             if (IS_AT_LEAST_T && PROFILE_HLG_MAP.get(mediaType) != null) {
-                for (int maxBframe : maxBFrames) {
-                    if (maxBframe != 0) {
-                        if (!mediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)
-                                && !mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
-                            continue;
-                        }
-                    }
-                    testArgs = new Object[3];
-                    testArgs[0] = arg[0];
-                    testArgs[1] = getVideoEncoderCfgParams(mediaType, br, param1, param2, fps,
-                            COLOR_FormatYUVP010, maxBframe,
-                            Objects.requireNonNull(PROFILE_HLG_MAP.get(mediaType)));
-                    testArgs[2] = String.format("%dkbps_%dx%d_%dfps_%s_%d-bframes", br / 1000,
-                            param1, param2, fps, colorFormatToString(COLOR_FormatYUVP010, 10),
-                            maxBframe);
-                    argsList.add(testArgs);
-                }
+                argsList.addAll(prepareTestArgs(arg,
+                        Objects.requireNonNull(PROFILE_HLG_MAP.get(mediaType)),
+                        COLOR_FormatYUVP010));
             }
         }
+        final Object[][] mpeg4SimpleProfileArgsList = new Object[][]{
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 64000, 176, 144, 15},  // MPEG4Level0 @sp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 128000, 176, 144, 15},  // MPEG4Level0b @sp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 64000, 128, 96, 30},  // MPEG4Level1 @sp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 128000, 352, 288, 15},  // MPEG4Level2 @sp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 384000, 352, 288, 30},  // MPEG4Level3 @sp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 4000000, 640, 480, 30},  // MPEG4Level4a @sp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 8000000, 720, 576, 24},  // MPEG4Level5 @sp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 12000000, 1280, 720, 30},  // MPEG4Level6 @sp
+        };
+        for (Object[] arg : mpeg4SimpleProfileArgsList) {
+            argsList.addAll(prepareTestArgs(arg, new int[]{MPEG4ProfileSimple},
+                    COLOR_FormatYUV420Flexible));
+        }
+        final Object[][] mpeg4AdvSimpleProfileArgsList = new Object[][]{
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 128000, 176, 144, 30},  // MPEG4Level1 @asp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 384000, 352, 288, 15},  // MPEG4Level2 @asp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 768000, 352, 288, 30},  // MPEG4Level3 @asp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 1500000, 352, 288, 30},  // MPEG4Level3b @asp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 3000000, 704, 576, 15},  // MPEG4Level4 @asp
+                {MediaFormat.MIMETYPE_VIDEO_MPEG4, 8000000, 720, 576, 30},  // MPEG4Level5 @asp
+        };
+        for (Object[] arg : mpeg4AdvSimpleProfileArgsList) {
+            argsList.addAll(prepareTestArgs(arg, new int[]{MPEG4ProfileAdvancedSimple},
+                    COLOR_FormatYUV420Flexible));
+        }
         return prepareParamList(argsList, isEncoder, needAudio, needVideo, false);
     }
 
@@ -306,601 +307,134 @@
                 new Pair<>(new int[]{AVCProfileBaseline}, AVCLevel3));
         PROFILE_LEVEL_CDD.put(MediaFormat.MIMETYPE_VIDEO_HEVC,
                 new Pair<>(new int[]{HEVCProfileMain}, HEVCMainTierLevel3));
-        PROFILE_LEVEL_CDD.put(MediaFormat.MIMETYPE_VIDEO_VP8,
-                new Pair<>(new int[]{VP8ProfileMain}, VP8Level_Version0));
         PROFILE_LEVEL_CDD.put(MediaFormat.MIMETYPE_VIDEO_VP9,
                 new Pair<>(new int[]{VP9Profile0}, VP9Level3));
     }
 
-    private static int getMinLevel(String mediaType, int width, int height, int frameRate,
-            int bitrate, int profile) {
-        switch (mediaType) {
-            case MediaFormat.MIMETYPE_VIDEO_AVC:
-                return getMinLevelAVC(width, height, frameRate, bitrate);
-            case MediaFormat.MIMETYPE_VIDEO_HEVC:
-                return getMinLevelHEVC(width, height, frameRate, bitrate);
-            case MediaFormat.MIMETYPE_VIDEO_H263:
-                return getMinLevelH263(width, height, frameRate, bitrate);
-            case MediaFormat.MIMETYPE_VIDEO_MPEG2:
-                return getMinLevelMPEG2(width, height, frameRate, bitrate);
-            case MediaFormat.MIMETYPE_VIDEO_MPEG4:
-                return getMinLevelMPEG4(width, height, frameRate, bitrate, profile);
-            // complex features disabled in VP8 Level/Version 0
-            case MediaFormat.MIMETYPE_VIDEO_VP8:
-                return VP8Level_Version0;
-            case MediaFormat.MIMETYPE_VIDEO_VP9:
-                return getMinLevelVP9(width, height, frameRate, bitrate);
-            case MediaFormat.MIMETYPE_VIDEO_AV1:
-                return getMinLevelAV1(width, height, frameRate, bitrate);
-            default:
-                return -1;
-        }
-    }
-
-    private static int getMinLevelAVC(int width, int height, int frameRate, int bitrate) {
-        class LevelLimitAVC {
-            private LevelLimitAVC(int level, int mbsPerSec, long mbs, int bitrate) {
-                this.level = level;
-                this.mbsPerSec = mbsPerSec;
-                this.mbs = mbs;
-                this.bitrate = bitrate;
-            }
-
-            private final int level;
-            private final int mbsPerSec;
-            private final long mbs;
-            private final int bitrate;
-        }
-        LevelLimitAVC[] limitsAVC = {
-                new LevelLimitAVC(AVCLevel1, 1485, 99, 64000),
-                new LevelLimitAVC(AVCLevel1b, 1485, 99, 128000),
-                new LevelLimitAVC(AVCLevel11, 3000, 396, 192000),
-                new LevelLimitAVC(AVCLevel12, 6000, 396, 384000),
-                new LevelLimitAVC(AVCLevel13, 11880, 396, 768000),
-                new LevelLimitAVC(AVCLevel2, 11880, 396, 2000000),
-                new LevelLimitAVC(AVCLevel21, 19800, 792, 4000000),
-                new LevelLimitAVC(AVCLevel22, 20250, 1620, 4000000),
-                new LevelLimitAVC(AVCLevel3, 40500, 1620, 10000000),
-                new LevelLimitAVC(AVCLevel31, 108000, 3600, 14000000),
-                new LevelLimitAVC(AVCLevel32, 216000, 5120, 20000000),
-                new LevelLimitAVC(AVCLevel4, 245760, 8192, 20000000),
-                new LevelLimitAVC(AVCLevel41, 245760, 8192, 50000000),
-                new LevelLimitAVC(AVCLevel42, 522240, 8704, 50000000),
-                new LevelLimitAVC(AVCLevel5, 589824, 22080, 135000000),
-                new LevelLimitAVC(AVCLevel51, 983040, 36864, 240000000),
-                new LevelLimitAVC(AVCLevel52, 2073600, 36864, 240000000),
-                new LevelLimitAVC(AVCLevel6, 4177920, 139264, 240000000),
-                new LevelLimitAVC(AVCLevel61, 8355840, 139264, 480000000),
-                new LevelLimitAVC(AVCLevel62, 16711680, 139264, 800000000),
-        };
-        int mbs = ((width + 15) / 16) * ((height + 15) / 16);
-        float mbsPerSec = mbs * frameRate;
-        for (LevelLimitAVC levelLimitsAVC : limitsAVC) {
-            if (mbs <= levelLimitsAVC.mbs && mbsPerSec <= levelLimitsAVC.mbsPerSec
-                    && bitrate <= levelLimitsAVC.bitrate) {
-                return levelLimitsAVC.level;
-            }
-        }
-        // if none of the levels suffice, select the highest level
-        return AVCLevel62;
-    }
-
-    private static int getMinLevelHEVC(int width, int height, int frameRate, int bitrate) {
-        class LevelLimitHEVC {
-            private LevelLimitHEVC(int level, int frameRate, long samples, int bitrate) {
-                this.level = level;
-                this.frameRate = frameRate;
-                this.samples = samples;
-                this.bitrate = bitrate;
-            }
-
-            private final int level;
-            private final int frameRate;
-            private final long samples;
-            private final int bitrate;
-        }
-        LevelLimitHEVC[] limitsHEVC = {
-                new LevelLimitHEVC(HEVCMainTierLevel1, 15, 36864, 128000),
-                new LevelLimitHEVC(HEVCMainTierLevel2, 30, 122880, 1500000),
-                new LevelLimitHEVC(HEVCMainTierLevel21, 30, 245760, 3000000),
-                new LevelLimitHEVC(HEVCMainTierLevel3, 30, 552960, 6000000),
-                new LevelLimitHEVC(HEVCMainTierLevel31, 30, 983040, 10000000),
-                new LevelLimitHEVC(HEVCMainTierLevel4, 30, 2228224, 12000000),
-                new LevelLimitHEVC(HEVCHighTierLevel4, 30, 2228224, 30000000),
-                new LevelLimitHEVC(HEVCMainTierLevel41, 60, 2228224, 20000000),
-                new LevelLimitHEVC(HEVCHighTierLevel41, 60, 2228224, 50000000),
-                new LevelLimitHEVC(HEVCMainTierLevel5, 30, 8912896, 25000000),
-                new LevelLimitHEVC(HEVCHighTierLevel5, 30, 8912896, 100000000),
-                new LevelLimitHEVC(HEVCMainTierLevel51, 60, 8912896, 40000000),
-                new LevelLimitHEVC(HEVCHighTierLevel51, 60, 8912896, 160000000),
-                new LevelLimitHEVC(HEVCMainTierLevel52, 120, 8912896, 60000000),
-                new LevelLimitHEVC(HEVCHighTierLevel52, 120, 8912896, 240000000),
-                new LevelLimitHEVC(HEVCMainTierLevel6, 30, 35651584, 60000000),
-                new LevelLimitHEVC(HEVCHighTierLevel6, 30, 35651584, 240000000),
-                new LevelLimitHEVC(HEVCMainTierLevel61, 60, 35651584, 120000000),
-                new LevelLimitHEVC(HEVCHighTierLevel61, 60, 35651584, 480000000),
-                new LevelLimitHEVC(HEVCMainTierLevel62, 120, 35651584, 240000000),
-                new LevelLimitHEVC(HEVCHighTierLevel62, 120, 35651584, 800000000),
-        };
-        int samples = width * height;
-        for (LevelLimitHEVC levelLimitsHEVC : limitsHEVC) {
-            if (samples <= levelLimitsHEVC.samples && frameRate <= levelLimitsHEVC.frameRate
-                    && bitrate <= levelLimitsHEVC.bitrate) {
-                return levelLimitsHEVC.level;
-            }
-        }
-        // if none of the levels suffice, select the highest level
-        return HEVCHighTierLevel62;
-    }
-
-    private static int getMinLevelH263(int width, int height, int frameRate, int bitrate) {
-        class LevelLimitH263 {
-            private LevelLimitH263(int level, int height, int width, int frameRate,
-                    int bitrate) {
-                this.level = level;
-                this.height = height;
-                this.width = width;
-                this.frameRate = frameRate;
-                this.bitrate = bitrate;
-            }
-
-            private final int level;
-            private final int height;
-            private final int width;
-            private final int frameRate;
-            private final int bitrate;
-        }
-        LevelLimitH263[] limitsH263 = {
-                new LevelLimitH263(H263Level10, 176, 144, 15, 64000),
-                new LevelLimitH263(H263Level45, 176, 144, 15, 128000),
-                new LevelLimitH263(H263Level20, 176, 144, 30, 128000),
-                new LevelLimitH263(H263Level20, 352, 288, 15, 128000),
-                new LevelLimitH263(H263Level30, 352, 288, 30, 384000),
-                new LevelLimitH263(H263Level40, 352, 288, 30, 2048000),
-                new LevelLimitH263(H263Level50, 352, 240, 60, 4096000),
-                new LevelLimitH263(H263Level50, 352, 288, 50, 4096000),
-                new LevelLimitH263(H263Level60, 720, 240, 60, 8192000),
-                new LevelLimitH263(H263Level60, 720, 288, 50, 8192000),
-                new LevelLimitH263(H263Level70, 720, 480, 60, 16384000),
-                new LevelLimitH263(H263Level70, 720, 576, 50, 16384000),
-        };
-        for (LevelLimitH263 levelLimitsH263 : limitsH263) {
-            if (height <= levelLimitsH263.height && width <= levelLimitsH263.width &&
-                    frameRate <= levelLimitsH263.frameRate && bitrate <= levelLimitsH263.bitrate) {
-                return levelLimitsH263.level;
-            }
-        }
-        // if none of the levels suffice, select the highest level
-        return H263Level70;
-    }
-
-    private static int getMinLevelVP9(int width, int height, int frameRate, int bitrate) {
-        class LevelLimitVP9 {
-            private LevelLimitVP9(int level, long sampleRate, int size, int breadth,
-                    int bitrate) {
-                this.level = level;
-                this.sampleRate = sampleRate;
-                this.size = size;
-                this.breadth = breadth;
-                this.bitrate = bitrate;
-            }
-
-            private final int level;
-            private final long sampleRate;
-            private final int size;
-            private final int breadth;
-            private final int bitrate;
-        }
-        LevelLimitVP9[] limitsVP9 = {
-                new LevelLimitVP9(VP9Level1, 829440, 36864, 512, 200000),
-                new LevelLimitVP9(VP9Level11, 2764800, 73728, 768, 800000),
-                new LevelLimitVP9(VP9Level2, 4608000, 122880, 960, 1800000),
-                new LevelLimitVP9(VP9Level21, 9216000, 245760, 1344, 3600000),
-                new LevelLimitVP9(VP9Level3, 20736000, 552960, 2048, 7200000),
-                new LevelLimitVP9(VP9Level31, 36864000, 983040, 2752, 12000000),
-                new LevelLimitVP9(VP9Level4, 83558400, 2228224, 4160, 18000000),
-                new LevelLimitVP9(VP9Level41, 160432128, 2228224, 4160, 30000000),
-                new LevelLimitVP9(VP9Level5, 311951360, 8912896, 8384, 60000000),
-                new LevelLimitVP9(VP9Level51, 588251136, 8912896, 8384, 120000000),
-                new LevelLimitVP9(VP9Level52, 1176502272, 8912896, 8384, 180000000),
-                new LevelLimitVP9(VP9Level6, 1176502272, 35651584, 16832, 180000000),
-                new LevelLimitVP9(VP9Level61, 2353004544L, 35651584, 16832, 240000000),
-                new LevelLimitVP9(VP9Level62, 4706009088L, 35651584, 16832, 480000000),
-        };
-        int size = width * height;
-        int sampleRate = size * frameRate;
-        int breadth = Math.max(width, height);
-        for (LevelLimitVP9 levelLimitsVP9 : limitsVP9) {
-            if (sampleRate <= levelLimitsVP9.sampleRate && size <= levelLimitsVP9.size &&
-                    breadth <= levelLimitsVP9.breadth && bitrate <= levelLimitsVP9.bitrate) {
-                return levelLimitsVP9.level;
-            }
-        }
-        // if none of the levels suffice, select the highest level
-        return VP9Level62;
-    }
-
-    private static int getMinLevelMPEG2(int width, int height, int frameRate, int bitrate) {
-        class LevelLimitMPEG2 {
-            private LevelLimitMPEG2(int level, long sampleRate, int width, int height,
-                    int frameRate, int bitrate) {
-                this.level = level;
-                this.sampleRate = sampleRate;
-                this.width = width;
-                this.height = height;
-                this.frameRate = frameRate;
-                this.bitrate = bitrate;
-            }
-
-            private final int level;
-            private final long sampleRate;
-            private final int width;
-            private final int height;
-            private final int frameRate;
-            private final int bitrate;
-        }
-        // main profile limits, higher profiles will also support selected level
-        LevelLimitMPEG2[] limitsMPEG2 = {
-                new LevelLimitMPEG2(MPEG2LevelLL, 3041280, 352, 288, 30, 4000000),
-                new LevelLimitMPEG2(MPEG2LevelML, 10368000, 720, 576, 30, 15000000),
-                new LevelLimitMPEG2(MPEG2LevelH14, 47001600, 1440, 1088, 60, 60000000),
-                new LevelLimitMPEG2(MPEG2LevelHL, 62668800, 1920, 1088, 60, 80000000),
-                new LevelLimitMPEG2(MPEG2LevelHP, 125337600, 1920, 1088, 60, 80000000),
-        };
-        int size = width * height;
-        int sampleRate = size * frameRate;
-        for (LevelLimitMPEG2 levelLimitsMPEG2 : limitsMPEG2) {
-            if (sampleRate <= levelLimitsMPEG2.sampleRate && width <= levelLimitsMPEG2.width &&
-                    height <= levelLimitsMPEG2.height && frameRate <= levelLimitsMPEG2.frameRate &&
-                    bitrate <= levelLimitsMPEG2.bitrate) {
-                return levelLimitsMPEG2.level;
-            }
-        }
-        // if none of the levels suffice, select the highest level
-        return MPEG2LevelHP;
-    }
-
-    private static int getMinLevelMPEG4(int width, int height, int frameRate, int bitrate,
-            int profile) {
-        class LevelLimitMPEG4 {
-            private LevelLimitMPEG4(int profile, int level, long sampleRate, int width,
-                    int height, int frameRate, int bitrate) {
-                this.profile = profile;
-                this.level = level;
-                this.sampleRate = sampleRate;
-                this.width = width;
-                this.height = height;
-                this.frameRate = frameRate;
-                this.bitrate = bitrate;
-            }
-
-            private final int profile;
-            private final int level;
-            private final long sampleRate;
-            private final int width;
-            private final int height;
-            private final int frameRate;
-            private final int bitrate;
-        }
-        // simple profile limits, higher profiles will also support selected level
-        LevelLimitMPEG4[] limitsMPEG4 = {
-                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level0, 380160, 176, 144, 15, 64000),
-                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level1, 380160, 176, 144, 30, 64000),
-                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level0b, 380160, 176, 144, 15, 128000),
-                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level2, 1520640, 352, 288, 30, 128000),
-                new LevelLimitMPEG4(MPEG4ProfileSimple, MPEG4Level3, 3041280, 352, 288, 30, 384000),
-                new LevelLimitMPEG4(
-                        MPEG4ProfileSimple, MPEG4Level4a, 9216000, 640, 480, 30, 4000000),
-                new LevelLimitMPEG4(
-                        MPEG4ProfileSimple, MPEG4Level5, 10368000, 720, 576, 30, 8000000),
-                new LevelLimitMPEG4(
-                        MPEG4ProfileSimple, MPEG4Level6, 27648000, 1280, 720, 30, 12000000),
-                new LevelLimitMPEG4(
-                        MPEG4ProfileAdvancedSimple, MPEG4Level1, 760320, 176, 144, 30, 128000),
-                new LevelLimitMPEG4(
-                        MPEG4ProfileAdvancedSimple, MPEG4Level2, 1520640, 352, 288, 30, 384000),
-                new LevelLimitMPEG4(
-                        MPEG4ProfileAdvancedSimple, MPEG4Level3, 3041280, 352, 288, 30, 768000),
-                new LevelLimitMPEG4(
-                        MPEG4ProfileAdvancedSimple, MPEG4Level3b, 3041280, 352, 288, 30, 1500000),
-                new LevelLimitMPEG4(
-                        MPEG4ProfileAdvancedSimple, MPEG4Level4, 3041280, 704, 576, 30, 3000000),
-                new LevelLimitMPEG4(
-                        MPEG4ProfileAdvancedSimple, MPEG4Level5, 3041280, 720, 576, 30, 8000000),
-        };
-        int size = width * height;
-        int sampleRate = size * frameRate;
-        for (LevelLimitMPEG4 levelLimitsMPEG4 : limitsMPEG4) {
-            if (((profile & (MPEG4ProfileAdvancedSimple | MPEG4ProfileSimple)) != 0) &&
-                    profile != levelLimitsMPEG4.profile) continue;
-            if (sampleRate <= levelLimitsMPEG4.sampleRate && width <= levelLimitsMPEG4.width &&
-                    height <= levelLimitsMPEG4.height && frameRate <= levelLimitsMPEG4.frameRate &&
-                    bitrate <= levelLimitsMPEG4.bitrate) {
-                return levelLimitsMPEG4.level;
-            }
-        }
-        // if none of the levels suffice, select the highest level
-        return MPEG4Level6;
-    }
-
-    private static int getMinLevelAV1(int width, int height, int frameRate, int bitrate) {
-        class LevelLimitAV1 {
-            private LevelLimitAV1(int level, int size, int width, int height, long sampleRate,
-                    int bitrate) {
-                this.level = level;
-                this.size = size;
-                this.width = width;
-                this.height = height;
-                this.sampleRate = sampleRate;
-                this.bitrate = bitrate;
-            }
-
-            private final int level;
-            private final int size;
-            private final int width;
-            private final int height;
-            private final long sampleRate;
-            private final int bitrate;
-        }
-        // taking bitrate from main profile, will also be supported by high profile
-        LevelLimitAV1[] limitsAV1 = {
-                new LevelLimitAV1(AV1Level2, 147456, 2048, 1152, 4423680, 1500000),
-                new LevelLimitAV1(AV1Level21, 278784, 2816, 1584, 8363520, 3000000),
-                new LevelLimitAV1(AV1Level3, 665856, 4352, 2448, 19975680, 6000000),
-                new LevelLimitAV1(AV1Level31, 1065024, 5504, 3096, 31950720, 10000000),
-                new LevelLimitAV1(AV1Level4, 2359296, 6144, 3456, 70778880, 12000000),
-                new LevelLimitAV1(AV1Level41, 2359296, 6144, 3456, 141557760, 20000000),
-                new LevelLimitAV1(AV1Level5, 8912896, 8192, 4352, 267386880, 30000000),
-                new LevelLimitAV1(AV1Level51, 8912896, 8192, 4352, 534773760, 40000000),
-                new LevelLimitAV1(AV1Level52, 8912896, 8192, 4352, 1069547520, 60000000),
-                new LevelLimitAV1(AV1Level53, 8912896, 8192, 4352, 1069547520, 60000000),
-                new LevelLimitAV1(AV1Level6, 35651584, 16384, 8704, 1069547520, 60000000),
-                new LevelLimitAV1(AV1Level61, 35651584, 16384, 8704, 2139095040, 100000000),
-                new LevelLimitAV1(AV1Level62, 35651584, 16384, 8704, 4278190080L, 160000000),
-                new LevelLimitAV1(AV1Level63, 35651584, 16384, 8704, 4278190080L, 160000000),
-        };
-        int size = width * height;
-        int sampleRate = size * frameRate;
-        for (LevelLimitAV1 levelLimitsAV1 : limitsAV1) {
-            if (size <= levelLimitsAV1.size && width <= levelLimitsAV1.width &&
-                    height <= levelLimitsAV1.height && sampleRate <= levelLimitsAV1.sampleRate &&
-                    bitrate <= levelLimitsAV1.bitrate) {
-                return levelLimitsAV1.level;
-            }
-        }
-        // if none of the levels suffice or high profile, select the highest level
-        return AV1Level73;
-    }
-
-    private int getAacProfile(MediaFormat format) {
-        int aacProfile = format.getInteger(MediaFormat.KEY_AAC_PROFILE, -1);
-        int profile = format.getInteger(MediaFormat.KEY_PROFILE, -1);
-
-        if (aacProfile != -1 && profile != -1) {
-            assertEquals(String.format("aac-profile :- %d and profile :- %d are different.",
-                    aacProfile, profile), aacProfile, profile);
-            return aacProfile;
-        } else if (aacProfile != -1) {
-            return aacProfile;
-        } else if (profile != -1) {
-            return profile;
+    void checkIfTrackFormatIsOk(MediaFormat trackFormat) {
+        assertEquals("Input media type and extracted media type are not identical " + mTestEnv
+                        + mTestConfig, mActiveEncCfg.mMediaType,
+                trackFormat.getString(MediaFormat.KEY_MIME));
+        if (mIsVideo) {
+            assertEquals("Input width and extracted width are not the same " + mTestEnv
+                    + mTestConfig, mActiveEncCfg.mWidth, getWidth(trackFormat));
+            assertEquals("Input height and extracted height are not the same " + mTestEnv
+                    + mTestConfig, mActiveEncCfg.mHeight, getHeight(trackFormat));
         } else {
-            Log.e(LOG_TAG, "format doesn't contain either KEY_AAC_PROFILE or KEY_PROFILE");
-            return -1;
+            int expSampleRate = mActiveEncCfg.mProfile != AACObjectHE ? mActiveEncCfg.mSampleRate
+                    : mActiveEncCfg.mSampleRate / 2;
+            int expChCount = mActiveEncCfg.mProfile != AACObjectHE_PS ? mActiveEncCfg.mChannelCount
+                    : mActiveEncCfg.mChannelCount / 2;
+            assertEquals("Input sample rate and extracted sample rate are not the same "
+                            + mTestEnv + mTestConfig, expSampleRate,
+                    trackFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
+            assertEquals("Input channel count and extracted channel count are not the same "
+                            + mTestEnv + mTestConfig, expChCount,
+                    trackFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
         }
     }
 
-    @Override
-    public boolean isFormatSimilar(MediaFormat inpFormat, MediaFormat outFormat) {
-        if (!super.isFormatSimilar(inpFormat, outFormat)) {
-            Log.e(LOG_TAG, "Basic channel-rate/resolution comparisons failed");
-            return false;
-        }
-        String inpMediaType = inpFormat.getString(MediaFormat.KEY_MIME);
-        String outMediaType = outFormat.getString(MediaFormat.KEY_MIME);
-        assertEquals(String.format("input mediaType :- %s and output mediaType :- %s are "
-                        + "different.", inpMediaType, outMediaType), inpMediaType, outMediaType);
-        if (outMediaType.startsWith("audio/")) {
-            if (outFormat.getString(MediaFormat.KEY_MIME).equals(MediaFormat.MIMETYPE_AUDIO_AAC)) {
-                int inputProfileKey, outputProfileKey;
-                outputProfileKey = getAacProfile(outFormat);
-                inputProfileKey = getAacProfile(inpFormat);
-                if (outputProfileKey != inputProfileKey) {
-                    Log.e(LOG_TAG, "aac-profile in output " + outputProfileKey +
-                            " doesn't match configured input " + inputProfileKey);
-                    return false;
+    private boolean shallSupportProfileAndLevel(EncoderConfigParams cfg) {
+        Pair<int[], Integer> cddProfileLevel =
+                Objects.requireNonNull(PROFILE_LEVEL_CDD.get(cfg.mMediaType));
+        int[] profileCdd = cddProfileLevel.first;
+        int levelCdd = cddProfileLevel.second;
+        for (int cddProfile : profileCdd) {
+            if (cfg.mProfile == cddProfile) {
+                if (!cfg.mIsAudio) {
+                    if (cfg.mLevel <= levelCdd) {
+                        if (cfg.mMediaType.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_H263)
+                                && cfg.mLevel != MediaCodecInfo.CodecProfileLevel.H263Level45
+                                && cfg.mLevel > MediaCodecInfo.CodecProfileLevel.H263Level10) {
+                            continue;
+                        }
+                    } else {
+                        continue;
+                    }
                 }
+                return true;
             }
-        } else if (outMediaType.startsWith("video/")) {
-            if (!outFormat.containsKey(MediaFormat.KEY_PROFILE)) {
-                Log.e(LOG_TAG, "Output format doesn't contain profile key");
-                //TODO (b/151398466)
-                if (true) return true;
-                return false;
-            }
-            if (!outFormat.containsKey(MediaFormat.KEY_LEVEL)) {
-                Log.e(LOG_TAG, "Output format doesn't contain level key");
-                //TODO (b/151398466)
-                if (true) return true;
-                return false;
-            }
-            if (!inpFormat.containsKey(MediaFormat.KEY_PROFILE)) {
-                Log.e(LOG_TAG, "Input format doesn't contain profile key");
-                return false;
-            }
-            if (!inpFormat.containsKey(MediaFormat.KEY_LEVEL)) {
-                Log.e(LOG_TAG, "Input format doesn't contain level key");
-                return false;
-            }
-            if (outFormat.getInteger(MediaFormat.KEY_PROFILE)
-                    != inpFormat.getInteger(MediaFormat.KEY_PROFILE)) {
-                Log.e(LOG_TAG, "profile in output doesn't match configured input");
-                return false;
-            }
-            if (outFormat.getInteger(MediaFormat.KEY_LEVEL)
-                    != inpFormat.getInteger(MediaFormat.KEY_LEVEL)) {
-                Log.e(LOG_TAG, "level key in output doesn't match configured input");
-                return false;
-            }
-        } else {
-            Log.w(LOG_TAG, "non media mediaType:" + outMediaType);
         }
-        return true;
+        return false;
     }
 
     /**
-     * @see EncoderProfileLevelTest
-     * Besides the above, the test muxes the encoder output in all supported container formats
-     * and checks if muxers and extractors on device are signalling profile/level information
-     * correctly
+     * Check description of class {@link EncoderProfileLevelTest}
      */
     @CddTest(requirements = {"2.2.2/5.1/H-0-3", "2.2.2/5.1/H-0-4", "2.2.2/5.1/H-0-5", "5/C-0-3",
-            "5.2.1/C-1-1", "5.2.2/C-2-1", "5.2.3/C-2-1", "5.2.4/C-1-2",
-            "5.2.5/C-1-1"})
+            "5.2.1/C-1-1", "5.2.2/C-1-1", "5.2.4/C-1-2", "5.2.5/C-1-1"})
     @ApiTest(apis = {"android.media.MediaFormat#KEY_PROFILE",
             "android.media.MediaFormat#KEY_AAC_PROFILE",
             "android.media.MediaFormat#KEY_LEVEL"})
     @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
     public void testValidateProfileLevel() throws IOException, InterruptedException {
-        if (mEncCfgParams[0].mInputBitDepth != 8) {
+        if (mIsVideo && mEncCfgParams[0].mInputBitDepth != 8) {
             Assume.assumeTrue(mCodecName + " doesn't support " + colorFormatToString(
                             mEncCfgParams[0].mColorFormat, mEncCfgParams[0].mInputBitDepth),
                     hasSupportForColorFormat(mCodecName, mMediaType,
-                                             mEncCfgParams[0].mColorFormat));
+                            mEncCfgParams[0].mColorFormat));
         }
         boolean cddSupportedMediaType = PROFILE_LEVEL_CDD.get(mMediaType) != null;
-        int[] profileCdd = new int[0];
-        int levelCdd = 0;
-        if (cddSupportedMediaType) {
-            Pair<int[], Integer> cddProfileLevel = PROFILE_LEVEL_CDD.get(mMediaType);
-            profileCdd = cddProfileLevel.first;
-            levelCdd = cddProfileLevel.second;
-        }
 
         {
             mActiveRawRes = EncoderInput.getRawResource(mEncCfgParams[0]);
             assertNotNull("no raw resource found for testing config : "
-                    + mActiveEncCfg + mTestConfig + mTestEnv, mActiveRawRes);
+                    + mEncCfgParams[0] + mTestConfig + mTestEnv, mActiveRawRes);
             setUpSource(mActiveRawRes.mFileName);
             mSaveToMem = true;
+            mMuxOutput = true;
             mOutputBuff = new OutputManager();
             mCodec = MediaCodec.createByCodecName(mCodecName);
             MediaCodecInfo.CodecCapabilities codecCapabilities =
                     mCodec.getCodecInfo().getCapabilitiesForType(mMediaType);
+            int configsTested = 0;
             for (EncoderConfigParams cfg : mEncCfgParams) {
                 mActiveEncCfg = cfg;
                 MediaFormat format = cfg.getFormat();
                 if (!codecCapabilities.isFormatSupported(format)) {
                     if (cddSupportedMediaType) {
-                        boolean shallSupportProfileLevel = false;
-                        if (mIsAudio) {
-                            for (int cddProfile : profileCdd) {
-                                if (cfg.mProfile == cddProfile) {
-                                    shallSupportProfileLevel = true;
-                                    break;
-                                }
-                            }
-                        } else if (cfg.mProfile == profileCdd[0] && cfg.mLevel <= levelCdd) {
-                            shallSupportProfileLevel = true;
-                        }
-                        if (shallSupportProfileLevel) {
+                        if (shallSupportProfileAndLevel(cfg)) {
                             ArrayList<MediaFormat> formats = new ArrayList<>();
                             formats.add(format);
-                            assertFalse(String.format("No components present on the device supports"
-                                    + " cdd required profile:- %d, level:- %d, encode format:- %s",
-                                    cfg.mProfile, cfg.mLevel, format),
-                                    selectCodecs(mMediaType, formats, null, false).isEmpty());
+                            assertFalse("No component present on the device supports the cdd "
+                                    + "required encode format:- " + format + mTestConfig + mTestEnv,
+                                    selectCodecs(mMediaType, formats, null, true).isEmpty());
                         }
                         Log.d(LOG_TAG, mCodecName + " doesn't support format: " + format);
                     }
                     continue;
                 }
 
-                // Verify if device has an equivalent decoder for the current format
-                {
-                    ArrayList<MediaFormat> formatList = new ArrayList<>();
-                    formatList.add(format);
-                    assertTrue("Device advertises support for encoding " + format
-                                    + " but cannot decode it. \n" + mTestConfig + mTestEnv,
-                            selectCodecs(mMediaType, formatList, null, false).size() > 0);
-                }
-
                 mOutputBuff.reset();
                 configureCodec(format, false, true, true);
                 mCodec.start();
                 doWork(5);
                 queueEOS();
                 waitForAllOutputs();
-                MediaFormat outFormat = mCodec.getOutputFormat();
-                /* TODO(b/147348711) */
-                if (false) mCodec.stop();
-                else mCodec.reset();
+                mCodec.reset();
 
-                // TODO (b/151398466)
-                if (mMediaType.equals(MediaFormat.MIMETYPE_AUDIO_AAC)) {
-                    Assume.assumeTrue("neither KEY_AAC_PROFILE nor KEY_PROFILE are present",
-                            outFormat.containsKey(MediaFormat.KEY_AAC_PROFILE) ||
-                                    outFormat.containsKey(MediaFormat.KEY_PROFILE));
-                } else {
-                    Assume.assumeTrue("KEY_PROFILE not present",
-                            outFormat.containsKey(MediaFormat.KEY_PROFILE));
-                    Assume.assumeTrue(outFormat.containsKey(MediaFormat.KEY_LEVEL));
-                }
-                // TODO (b/166300446) avc mediaType fails validation
-                if (mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
-                    Log.w(LOG_TAG, "Skip validation for mediaType = " + mMediaType);
-                    continue;
-                }
-                // TODO (b/166305723) hevc mediaType fails validation
-                if (mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
-                    Log.w(LOG_TAG, "Skip validation for mediaType = " + mMediaType);
-                    continue;
-                }
-                // TODO (b/166300448) h263 and mpeg4 mediaTypes fails validation
-                if (mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_H263)
-                        || mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_MPEG4)) {
-                    Log.w(LOG_TAG, "Skip validation for mediaType = " + mMediaType);
-                    continue;
-                }
-                // TODO (b/184889671) aac for profile AACObjectHE fails validation
-                // TODO (b/184890155) aac for profile AACObjectLD, AACObjectELD fails validation
-                if (mMediaType.equals(MediaFormat.MIMETYPE_AUDIO_AAC)) {
-                    if (cfg.mProfile == AACObjectHE || cfg.mProfile == AACObjectELD
-                            || cfg.mProfile == AACObjectLD) {
-                        Log.w(LOG_TAG, "Skip validation for mediaType = " + mMediaType
-                                + " profile " + cfg.mProfile);
-                        continue;
-                    }
-                }
-                String msg = String.format("Configured input format and received output format are "
-                        + "not similar. \nConfigured Input format is :- %s \nReceived Output "
-                        + "format is :- %s \n", format, outFormat);
-                assertTrue(msg + mTestConfig + mTestEnv, isFormatSimilar(format, outFormat));
+                MediaFormat trackFormat = validateProfileAndLevel();
 
-                for (int muxerFormat = MediaMuxer.OutputFormat.MUXER_OUTPUT_FIRST;
-                     muxerFormat <= MediaMuxer.OutputFormat.MUXER_OUTPUT_LAST; muxerFormat++) {
-                    if (!isMediaTypeContainerPairValid(mMediaType, muxerFormat)) continue;
-                    ByteBuffer mBuff = mOutputBuff.getBuffer();
-                    String tmpPath = getTempFilePath((cfg.mInputBitDepth == 10) ? "10bit" : "");
-                    muxOutput(tmpPath, muxerFormat, outFormat, mBuff, mInfoList);
-                    MediaExtractor extractor = new MediaExtractor();
-                    extractor.setDataSource(tmpPath);
-                    assertEquals("Should be only 1 track \n" + mTestConfig + mTestEnv, 1,
-                            extractor.getTrackCount());
-                    MediaFormat extractedFormat = extractor.getTrackFormat(0);
-                    if (!isFormatSimilar(outFormat, extractedFormat)) {
-                        msg = " Input format and extracted format are not similar. "
-                                + "\n Muxer input format :- " + outFormat
-                                + "\n Extracted format :- " + extractedFormat
-                                + "\n Muxer writer :- " + muxerFormat + "\n" + mTestConfig
-                                + mTestEnv;
-                        fail(msg);
-                    }
-                    extractor.release();
-                    new File(tmpPath).delete();
+                deleteMuxedFile();
+
+                // validate extracted format for mandatory keys
+                if (trackFormat != null) checkIfTrackFormatIsOk(trackFormat);
+
+                // Verify if device has an equivalent decoder for the current format
+                ArrayList<MediaFormat> formatList = new ArrayList<>();
+                if (mProfileLevel != null && mProfileLevel.second != -1
+                        && cfg.mLevel != mProfileLevel.second) {
+                    format.setInteger(MediaFormat.KEY_LEVEL, mProfileLevel.second);
                 }
+                formatList.add(format);
+                assertTrue("Device advertises support for encoding " + format + " but cannot"
+                                + " decode it. \n" + mTestConfig + mTestEnv,
+                        selectCodecs(mMediaType, formatList, null, false).size() > 0);
+                configsTested++;
             }
             mCodec.release();
+            Assume.assumeTrue("skipping test, formats not supported by component",
+                    configsTested > 0);
         }
     }
 }
diff --git a/tests/tests/hibernation/src/android/hibernation/cts/AppHibernationUtils.kt b/tests/tests/hibernation/src/android/hibernation/cts/AppHibernationUtils.kt
index 9021f8e..843f490 100644
--- a/tests/tests/hibernation/src/android/hibernation/cts/AppHibernationUtils.kt
+++ b/tests/tests/hibernation/src/android/hibernation/cts/AppHibernationUtils.kt
@@ -25,6 +25,7 @@
 import android.content.Context
 import android.content.Intent
 import android.content.pm.PackageManager
+import android.graphics.Point
 import android.os.Handler
 import android.os.Looper
 import android.os.ParcelFileDescriptor
@@ -235,7 +236,12 @@
         waitFindObject(uiAutomation, By.text("Open")).click()
     } else {
         runShellCommandOrThrow(CMD_EXPAND_NOTIFICATIONS)
-        waitFindNotification(notifSelector, NOTIF_FIND_TIMEOUT).click()
+        val notification = waitFindNotification(notifSelector, NOTIF_FIND_TIMEOUT)
+        if (FeatureUtil.isAutomotive()) {
+            notification.click(Point(0, 0))
+        } else {
+            notification.click()
+        }
     }
 }
 
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/DecodeEditEncodeTest.java b/tests/tests/media/codec/src/android/media/codec/cts/DecodeEditEncodeTest.java
index a24638f..b584643 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/DecodeEditEncodeTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/DecodeEditEncodeTest.java
@@ -42,6 +42,7 @@
 import androidx.test.platform.app.InstrumentationRegistry;
 
 import com.android.compatibility.common.util.ApiLevelUtil;
+import com.android.compatibility.common.util.ApiTest;
 import com.android.compatibility.common.util.MediaUtils;
 
 import org.junit.Before;
@@ -270,6 +271,15 @@
         mUseHighBitDepth = useHighBitDepth;
     }
 
+    @ApiTest(apis = {"android.opengl.GLES20#GL_FRAGMENT_SHADER",
+            "android.opengl.GLES20#glReadPixels",
+            "android.opengl.GLES30#glReadPixels",
+            "android.media.format.MediaFormat#KEY_ALLOW_FRAME_DROP",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_Format32bitABGR2101010",
+            "android.media.MediaFormat#KEY_COLOR_RANGE",
+            "android.media.MediaFormat#KEY_COLOR_STANDARD",
+            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
     @Test
     public void testVideoEdit() throws Throwable {
         VideoEditWrapper.runTest(this);
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/EncodeDecodeTest.java b/tests/tests/media/codec/src/android/media/codec/cts/EncodeDecodeTest.java
index 79b5eac..894d4b1 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/EncodeDecodeTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/EncodeDecodeTest.java
@@ -44,6 +44,7 @@
 import androidx.test.filters.SmallTest;
 
 import com.android.compatibility.common.util.ApiLevelUtil;
+import com.android.compatibility.common.util.ApiTest;
 import com.android.compatibility.common.util.MediaUtils;
 
 import org.junit.Before;
@@ -1268,6 +1269,11 @@
      * a series of byte[] buffers and decoded into ByteBuffers.  The output is checked for
      * validity.
      */
+    @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420Planar",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedPlanar",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedSemiPlanar",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_TI_FormatYUV420PackedSemiPlanar"})
     @Test
     public void testEncodeDecodeVideoFromBufferToBuffer() throws Exception {
         encodeDecodeVideoFromBuffer(false);
@@ -1285,6 +1291,16 @@
      * the test thread, so we have to hand control off to a new thread for the duration of
      * the test.
      */
+    @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420Planar",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedPlanar",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedSemiPlanar",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_TI_FormatYUV420PackedSemiPlanar",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
+            "android.opengl.GLES20#glReadPixels",
+            "android.media.MediaFormat#KEY_COLOR_RANGE",
+            "android.media.MediaFormat#KEY_COLOR_STANDARD",
+            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
     @Test
     public void testEncodeDecodeVideoFromBufferToSurface() throws Throwable {
         BufferToSurfaceWrapper.runTest(this);
@@ -1294,11 +1310,22 @@
      * Tests streaming of AVC through the encoder and decoder.  Data is provided through
      * a Surface and decoded onto a Surface.  The output is checked for validity.
      */
+    @ApiTest(apis = {"android.media.MediaCodec#createInputSurface",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
+            "android.opengl.GLES20#glReadPixels",
+            "android.media.MediaFormat#KEY_COLOR_RANGE",
+            "android.media.MediaFormat#KEY_COLOR_STANDARD",
+            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
     @Test
     public void testEncodeDecodeVideoFromSurfaceToSurface() throws Throwable {
         SurfaceToSurfaceWrapper.runTest(this, false, false);
     }
-
+    @ApiTest(apis = {"AMediaCodec_createInputSurface",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
+            "android.opengl.GLES20#glReadPixels",
+            "android.media.MediaFormat#KEY_COLOR_RANGE",
+            "android.media.MediaFormat#KEY_COLOR_STANDARD",
+            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
     @Test
     public void testEncodeDecodeVideoFromSurfaceToSurfaceNdk() throws Throwable {
         SurfaceToSurfaceWrapper.runTest(this, false, USE_NDK);
@@ -1308,11 +1335,25 @@
      * Tests streaming of video through the encoder and decoder.  Data is provided through
      * a PersistentSurface and decoded onto a Surface.  The output is checked for validity.
      */
+    @ApiTest(apis = {"android.media.MediaCodec#createPersistentInputSurface",
+            "android.media.MediaCodec#setInputSurface",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
+            "android.opengl.GLES20#glReadPixels",
+            "android.media.MediaFormat#KEY_COLOR_RANGE",
+            "android.media.MediaFormat#KEY_COLOR_STANDARD",
+            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
     @Test
     public void testEncodeDecodeVideoFromSurfaceToPersistentSurface() throws Throwable {
         SurfaceToSurfaceWrapper.runTest(this, true, false);
     }
 
+    @ApiTest(apis = {"AMediaCodec_createPersistentInputSurface",
+            "AMediaCodec_setInputSurface",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
+            "android.opengl.GLES20#glReadPixels",
+            "android.media.MediaFormat#KEY_COLOR_RANGE",
+            "android.media.MediaFormat#KEY_COLOR_STANDARD",
+            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
     @Test
     public void testEncodeDecodeVideoFromSurfaceToPersistentSurfaceNdk() throws Throwable {
         SurfaceToSurfaceWrapper.runTest(this, true, USE_NDK);
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/EncodeVirtualDisplayTest.java b/tests/tests/media/codec/src/android/media/codec/cts/EncodeVirtualDisplayTest.java
old mode 100755
new mode 100644
index ca08397..fa4e2ac
--- a/tests/tests/media/codec/src/android/media/codec/cts/EncodeVirtualDisplayTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/EncodeVirtualDisplayTest.java
@@ -48,6 +48,7 @@
 import androidx.test.platform.app.InstrumentationRegistry;
 
 import com.android.compatibility.common.util.ApiLevelUtil;
+import com.android.compatibility.common.util.ApiTest;
 import com.android.compatibility.common.util.MediaUtils;
 
 import org.junit.Before;
@@ -189,6 +190,13 @@
      *
      * @throws Exception
      */
+    @ApiTest(apis = {"AMediaCodec_createInputSurface",
+            "android.hardware.display.DisplayManager#createVirtualDisplay",
+            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
+            "android.opengl.GLES20#glReadPixels",
+            "android.media.MediaFormat#KEY_COLOR_RANGE",
+            "android.media.MediaFormat#KEY_COLOR_STANDARD",
+            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
     @Test
     public void testEncodeVirtualDisplay() throws Throwable {
         if (!MediaUtils.check(sIsAtLeastR, "test needs Android 11")) return;
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/ExtractDecodeEditEncodeMuxTest.java b/tests/tests/media/codec/src/android/media/codec/cts/ExtractDecodeEditEncodeMuxTest.java
index 67c3ce8..1225822 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/ExtractDecodeEditEncodeMuxTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/ExtractDecodeEditEncodeMuxTest.java
@@ -41,7 +41,7 @@
 
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 
-import com.android.compatibility.common.util.CddTest;
+import com.android.compatibility.common.util.ApiTest;
 import com.android.compatibility.common.util.Preconditions;
 
 import org.junit.After;
@@ -69,6 +69,30 @@
  * <p>It also tests the way the codec config buffers need to be passed from the MediaCodec to the
  * MediaMuxer.
  */
+@ApiTest(apis = {"android.opengl.GLES20#GL_FRAGMENT_SHADER",
+        "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
+        "android.media.MediaFormat#KEY_BIT_RATE",
+        "android.media.MediaFormat#KEY_COLOR_FORMAT",
+        "android.media.MediaFormat#KEY_FRAME_RATE",
+        "android.media.MediaFormat#KEY_I_FRAME_INTERVAL",
+        "android.media.MediaFormat#KEY_SAMPLE_RATE",
+        "android.media.MediaFormat#KEY_CHANNEL_COUNT",
+        "android.media.MediaFormat#KEY_PROFILE",
+        "android.media.MediaFormat#KEY_AAC_PROFILE",
+        "android.media.MediaExtractor#setDataSource",
+        "android.media.MediaExtractor#getTrackCount",
+        "android.media.MediaExtractor#getTrackFormat",
+        "android.media.MediaExtractor#selectTrack",
+        "android.media.MediaExtractor#readSampleData",
+        "android.media.MediaExtractor#getSampleTime",
+        "android.media.MediaExtractor#getSampleFlags",
+        "android.media.MediaExtractor#advance",
+        "android.media.MediaExtractor#release",
+        "android.media.MediaMuxer#start",
+        "android.media.MediaMuxer#stop",
+        "android.media.MediaMuxer#addTrack",
+        "android.media.MediaMuxer#writeSampleData",
+        "android.media.MediaMuxer#release"})
 @TargetApi(18)
 @AppModeFull(reason = "Instant apps cannot access the SD card")
 @RunWith(AndroidJUnit4.class)
@@ -145,7 +169,6 @@
         super.tearDown();
     }
 
-    @CddTest(requirements = {"5.2", "5.3"})
     @Test
     public void testExtractDecodeEditEncodeMuxQCIF() throws Throwable {
         if(!setSize(176, 144)) return;
@@ -155,7 +178,6 @@
         TestWrapper.runTest(this);
     }
 
-    @CddTest(requirements = {"5.2", "5.3"})
     @Test
     public void testExtractDecodeEditEncodeMuxQVGA() throws Throwable {
         if(!setSize(320, 240)) return;
@@ -165,7 +187,6 @@
         TestWrapper.runTest(this);
     }
 
-    @CddTest(requirements = {"5.2", "5.3"})
     @Test
     public void testExtractDecodeEditEncodeMux720p() throws Throwable {
         if(!setSize(1280, 720)) return;
@@ -175,7 +196,6 @@
         TestWrapper.runTest(this);
     }
 
-    @CddTest(requirements = {"5.2", "5.3"})
     @Test
     public void testExtractDecodeEditEncodeMux2160pHevc() throws Throwable {
         if(!setSize(3840, 2160)) return;
@@ -185,7 +205,6 @@
         TestWrapper.runTest(this);
     }
 
-    @CddTest(requirements = {"5.1.1", "5.1.2"})
     @Test
     public void testExtractDecodeEditEncodeMuxAudio() throws Throwable {
         if(!setSize(1280, 720)) return;
@@ -195,7 +214,6 @@
         TestWrapper.runTest(this);
     }
 
-    @CddTest(requirements = {"5.1.1", "5.1.2", "5.2", "5.3"})
     @Test
     public void testExtractDecodeEditEncodeMuxAudioVideo() throws Throwable {
         if(!setSize(1280, 720)) return;
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecBlockModelTest.java b/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecBlockModelTest.java
index 29a14a0..5822063 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecBlockModelTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecBlockModelTest.java
@@ -41,6 +41,7 @@
 import androidx.test.filters.SmallTest;
 
 import com.android.compatibility.common.util.ApiLevelUtil;
+import com.android.compatibility.common.util.ApiTest;
 import com.android.compatibility.common.util.MediaUtils;
 import com.android.compatibility.common.util.NonMainlineTest;
 import com.android.compatibility.common.util.Preconditions;
@@ -92,6 +93,7 @@
     @Presubmit
     @SmallTest
     @RequiresDevice
+    @ApiTest(apis = "MediaCodec#CONFIGURE_FLAG_USE_BLOCK_MODEL")
     @Test
     public void testDecodeShortVideo() throws InterruptedException {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
@@ -112,6 +114,7 @@
     @Presubmit
     @SmallTest
     @RequiresDevice
+    @ApiTest(apis = "MediaCodec#CONFIGURE_FLAG_USE_BLOCK_MODEL")
     @Test
     public void testDecodeShortAudio() throws InterruptedException {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
@@ -132,6 +135,7 @@
     @Presubmit
     @SmallTest
     @RequiresDevice
+    @ApiTest(apis = "MediaCodec#CONFIGURE_FLAG_USE_BLOCK_MODEL")
     @Test
     public void testEncodeShortAudio() throws InterruptedException {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
@@ -145,6 +149,7 @@
     @Presubmit
     @SmallTest
     @RequiresDevice
+    @ApiTest(apis = "MediaCodec#CONFIGURE_FLAG_USE_BLOCK_MODEL")
     @Test
     public void testEncodeShortVideo() throws InterruptedException {
         if (!MediaUtils.check(mIsAtLeastR, "test needs Android 11")) return;
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecResourceTest.java b/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecResourceTest.java
index bb769ca..b94f2de 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecResourceTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecResourceTest.java
@@ -45,6 +45,8 @@
 import androidx.test.filters.SmallTest;
 import androidx.test.platform.app.InstrumentationRegistry;
 
+import com.android.compatibility.common.util.ApiTest;
+
 import org.junit.Test;
 
 import java.util.ArrayList;
@@ -80,6 +82,7 @@
         public final int uid;
     }
 
+    @ApiTest(apis = "MediaCodec#createByCodecNameForClient")
     @Test
     public void testCreateCodecForAnotherProcessWithoutPermissionsThrows() throws Exception {
         CodecInfo codecInfo = getFirstVideoHardwareDecoder();
@@ -102,6 +105,7 @@
 
     // A process with lower priority (e.g. background app) should not be able to reclaim
     // MediaCodec resources from a process with higher priority (e.g. foreground app).
+    @ApiTest(apis = "MediaCodec#createByCodecNameForClient")
     @Test
     public void testLowerPriorityProcessFailsToReclaimResources() throws Exception {
         CodecInfo codecInfo = getFirstVideoHardwareDecoder();
@@ -188,6 +192,7 @@
 
     // A process with higher priority (e.g. foreground app) should be able to reclaim
     // MediaCodec resources from a process with lower priority (e.g. background app).
+    @ApiTest(apis = "MediaCodec#createByCodecNameForClient")
     @Test
     public void testHigherPriorityProcessReclaimsResources() throws Exception {
         CodecInfo codecInfo = getFirstVideoHardwareDecoder();
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecTest.java b/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecTest.java
index 3173465..52d01b2 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/MediaCodecTest.java
@@ -68,6 +68,7 @@
 import androidx.test.filters.SmallTest;
 
 import com.android.compatibility.common.util.ApiLevelUtil;
+import com.android.compatibility.common.util.ApiTest;
 import com.android.compatibility.common.util.MediaUtils;
 import com.android.compatibility.common.util.Preconditions;
 
@@ -142,6 +143,18 @@
      * A selective test to ensure proper exceptions are thrown from MediaCodec
      * methods when called in incorrect operational states.
      */
+    @ApiTest(apis = {"MediaCodec#createByCodecName", "MediaCodec#createDecoderByType",
+            "MediaCodec#createEncoderByType", "MediaCodec#start", "MediaCodec#flush",
+            "MediaCodec#configure", "MediaCodec#dequeueInputBuffer",
+            "MediaCodec#dequeueOutputBuffer", "MediaCodec#createInputSurface",
+            "MediaCodec#getInputBuffers", "MediaCodec#getQueueRequest",
+            "MediaCodec#getOutputFrame", "MediaCodec#stop", "MediaCodec#release",
+            "MediaCodec#getCodecInfo", "MediaCodec#getSupportedVendorParameters",
+            "MediaCodec#getParameterDescriptor",
+            "MediaCodec#subscribeToVendorParameters",
+            "MediaCodec#unsubscribeFromVendorParameters",
+            "MediaCodec#getInputBuffer", "MediaCodec#getOutputBuffer",
+            "MediaCodec#setCallback", "MediaCodec#getName"})
     @Test
     public void testException() throws Exception {
         boolean tested = false;
@@ -245,15 +258,6 @@
             fail("flush should not return MediaCodec.CodecException on wrong state");
         } catch (IllegalStateException e) { // expected
         }
-        try {
-            codec.setParameters(new Bundle());
-            fail("setParameters should not work if not yet configured/initialized");
-        } catch (MediaCodec.CodecException e) {
-            logMediaCodecException(e);
-            fail("setParameters should not return MediaCodec.CodecException if not yet "
-                    + "configured/initialized");
-        } catch (IllegalStateException e) { // expected
-        }
 
         MediaCodecInfo codecInfo = codec.getCodecInfo(); // obtaining the codec info now is fine.
         try {
@@ -553,6 +557,7 @@
      * <br> calling createInputSurface() after start() throws exception
      * <br> calling createInputSurface() with a non-Surface color format is not required to throw exception
      */
+    @ApiTest(apis = "MediaCodec#createInputSurface")
     @Test
     public void testCreateInputSurfaceErrors() {
         if (!supportsCodec(MIME_TYPE, true)) {
@@ -604,6 +609,7 @@
      * <br> signaling EOS twice throws exception
      * <br> submitting a frame after EOS throws exception [TODO]
      */
+    @ApiTest(apis = "MediaCodec#signalEndOfInputStream")
     @Test
     public void testSignalSurfaceEOS() {
         if (!supportsCodec(MIME_TYPE, true)) {
@@ -662,6 +668,7 @@
      * Tests:
      * <br> stopping with buffers in flight doesn't crash or hang
      */
+    @ApiTest(apis = "MediaCodec#stop")
     @Test
     public void testAbruptStop() {
         if (!supportsCodec(MIME_TYPE, true)) {
@@ -718,6 +725,7 @@
         }
     }
 
+    @ApiTest(apis = {"MediaCodec#flush", "MediaCodec#release"})
     @Test
     public void testReleaseAfterFlush() throws IOException, InterruptedException {
         String mimes[] = new String[] { MIME_TYPE, MIME_TYPE_AUDIO};
@@ -781,11 +789,13 @@
         callbackThread.join();
     }
 
+    @ApiTest(apis = {"MediaCodec#setCallback", "MediaCodec#flush", "MediaCodec#reset"})
     @Test
     public void testAsyncFlushAndReset() throws Exception, InterruptedException {
         testAsyncReset(false /* testStop */);
     }
 
+    @ApiTest(apis = {"MediaCodec#setCallback", "MediaCodec#stop", "MediaCodec#reset"})
     @Test
     public void testAsyncStopAndReset() throws Exception, InterruptedException {
         testAsyncReset(true /* testStop */);
@@ -1116,6 +1126,7 @@
      * Tests:
      * <br> dequeueInputBuffer() fails when encoder configured with an input Surface
      */
+    @ApiTest(apis = {"MediaCodec#dequeueInputBuffer", "MediaCodec#getMetrics"})
     @Test
     public void testDequeueSurface() {
         if (!supportsCodec(MIME_TYPE, true)) {
@@ -1175,6 +1186,8 @@
      * <br> configure() encoder with Surface, re-configure() without Surface works
      * <br> sending EOS with signalEndOfInputStream on non-Surface encoder fails
      */
+    @ApiTest(apis = {"MediaCodec#configure", "MediaCodec#signalEndOfInputStream",
+            "MediaCodec#getMetrics"})
     @Test
     public void testReconfigureWithoutSurface() {
         if (!supportsCodec(MIME_TYPE, true)) {
@@ -1244,6 +1257,7 @@
         }
     }
 
+    @ApiTest(apis = "MediaCodec#flush")
     @Test
     public void testDecodeAfterFlush() throws InterruptedException {
         testDecodeAfterFlush(true /* audio */);
@@ -1425,6 +1439,7 @@
      * Tests whether decoding a short group-of-pictures succeeds. The test queues a few video frames
      * then signals end-of-stream. The test fails if the decoder doesn't output the queued frames.
      */
+    @ApiTest(apis = {"MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface"})
     @Test
     public void testDecodeShortInput() throws InterruptedException {
         // Input buffers from this input video are queued up to and including the video frame with
@@ -1536,6 +1551,10 @@
     /**
      * Tests creating two decoders for {@link #MIME_TYPE_AUDIO} at the same time.
      */
+    @ApiTest(apis = {"MediaCodec#createDecoderByType",
+            "android.media.MediaFormat#KEY_MIME",
+            "android.media.MediaFormat#KEY_SAMPLE_RATE",
+            "android.media.MediaFormat#KEY_CHANNEL_COUNT"})
     @Test
     public void testCreateTwoAudioDecoders() {
         final MediaFormat format = MediaFormat.createAudioFormat(
@@ -1583,6 +1602,10 @@
     /**
      * Tests creating an encoder and decoder for {@link #MIME_TYPE_AUDIO} at the same time.
      */
+    @ApiTest(apis = {"MediaCodec#createDecoderByType", "MediaCodec#createEncoderByType",
+            "android.media.MediaFormat#KEY_MIME",
+            "android.media.MediaFormat#KEY_SAMPLE_RATE",
+            "android.media.MediaFormat#KEY_CHANNEL_COUNT"})
     @Test
     public void testCreateAudioDecoderAndEncoder() {
         if (!supportsCodec(MIME_TYPE_AUDIO, true)) {
@@ -1641,6 +1664,12 @@
         }
     }
 
+    @ApiTest(apis = {"MediaCodec#createEncoderByType",
+            "android.media.MediaFormat#KEY_MIME",
+            "android.media.MediaFormat#KEY_SAMPLE_RATE",
+            "android.media.MediaFormat#KEY_CHANNEL_COUNT",
+            "android.media.MediaFormat#KEY_WIDTH",
+            "android.media.MediaFormat#KEY_HEIGHT"})
     @Test
     public void testConcurrentAudioVideoEncodings() throws InterruptedException {
         if (!supportsCodec(MIME_TYPE_AUDIO, true)) {
@@ -1689,6 +1718,7 @@
         public int mBitRate;
     }
 
+    @ApiTest(apis = {"MediaCodec#CryptoInfo", "MediaCodec#CryptoInfo#Pattern"})
     @Test
     public void testCryptoInfoPattern() {
         CryptoInfo info = new CryptoInfo();
@@ -1989,6 +2019,7 @@
     /**
      * Tests MediaCodec.CryptoException
      */
+    @ApiTest(apis = "MediaCodec#CryptoException")
     @Test
     public void testCryptoException() {
         int errorCode = CryptoException.ERROR_KEY_EXPIRED;
@@ -2007,6 +2038,8 @@
      *
      * As of Q, any codec of type "audio/raw" must support PCM encoding float.
      */
+    @ApiTest(apis = {"android.media.AudioFormat#ENCODING_PCM_16BIT",
+            "android.media.AudioFormat#ENCODING_PCM_FLOAT"})
     @MediumTest
     @Test
     public void testPCMEncoding() throws Exception {
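The Javadoc above notes that, as of Android Q, any "audio/raw" codec must support float PCM. A minimal standalone sketch (not the test's own code) of asking an audio decoder for float output via the keys named in the annotation; imports and exception handling are elided:

    MediaFormat format = MediaFormat.createAudioFormat(
            MediaFormat.MIMETYPE_AUDIO_AAC, 44100 /* sampleRate */, 2 /* channelCount */);
    // Request float PCM output instead of the default 16-bit PCM.
    format.setInteger(MediaFormat.KEY_PCM_ENCODING, AudioFormat.ENCODING_PCM_FLOAT);
    MediaCodec decoder = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
    decoder.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
    // ... decode, then inspect getOutputFormat().getInteger(MediaFormat.KEY_PCM_ENCODING) ...
    decoder.release();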
@@ -2132,6 +2165,7 @@
         return actualEncoding;
     }
 
+    @ApiTest(apis = "android.media.AudioFormat#KEY_FLAC_COMPRESSION_LEVEL")
     @SmallTest
     @Test
     public void testFlacIdentity() throws Exception {
@@ -2185,6 +2219,7 @@
         }
     }
 
+    @ApiTest(apis = "MediaCodec#release")
     @Test
     public void testAsyncRelease() throws Exception {
         OutputSurface outputSurface = new OutputSurface(1, 1);
@@ -2272,6 +2307,7 @@
         }
     }
 
+    @ApiTest(apis = "MediaCodec#setAudioPresentation")
     @Test
     public void testSetAudioPresentation() throws Exception {
         MediaFormat format = MediaFormat.createAudioFormat(
@@ -2287,6 +2323,7 @@
                 (new AudioPresentation.Builder(42 /* presentationId */)).build());
     }
 
+    @ApiTest(apis = "android.media.MediaFormat#KEY_PREPEND_HEADER_TO_SYNC_FRAMES")
     @Test
     public void testPrependHeadersToSyncFrames() throws IOException {
         MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
@@ -2361,6 +2398,7 @@
      * codec is flushed after the first buffer is queued, so this test walks
      * through the scenario.
      */
+    @ApiTest(apis = "MediaCodec#flush")
     @Test
     public void testFlushAfterFirstBuffer() throws Exception {
         if (MediaUtils.check(mIsAtLeastR, "test needs Android 11")) {
@@ -2458,6 +2496,10 @@
         }
     }
 
+    @ApiTest(apis = {"MediaCodec#getSupportedVendorParameters",
+            "MediaCodec#getParameterDescriptor",
+            "MediaCodec#subscribeToVendorParameters",
+            "MediaCodec#unsubscribeFromVendorParameters"})
     @Test
     public void testVendorParameters() {
         if (!MediaUtils.check(mIsAtLeastS, "test needs Android 12")) {
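testVendorParameters covers the Android 12 vendor-parameter APIs listed in its annotation. A minimal sketch of how the four calls fit together (a generic fragment, not taken from the test; imports, codec state handling, and exception handling are elided):

    MediaCodec codec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
    List<String> names = codec.getSupportedVendorParameters();
    for (String name : names) {
        MediaCodec.ParameterDescriptor desc = codec.getParameterDescriptor(name);
        // desc may be null if the parameter is unknown; otherwise it reports the value type.
    }
    // Subscribe so changes to these parameters are reflected in the output format, then undo it.
    codec.subscribeToVendorParameters(names);
    codec.unsubscribeFromVendorParameters(names);
    codec.release();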
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/VideoCodecTest.java b/tests/tests/media/codec/src/android/media/codec/cts/VideoCodecTest.java
index ac0dd0f..6c67812 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/VideoCodecTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/VideoCodecTest.java
@@ -26,6 +26,7 @@
 import android.platform.test.annotations.AppModeFull;
 import android.util.Log;
 
+import com.android.compatibility.common.util.ApiTest;
 import com.android.compatibility.common.util.MediaUtils;
 
 import org.junit.Assume;
@@ -625,41 +626,65 @@
         }
     }
 
+    @ApiTest(apis = {"android.media.MediaFormat#KEY_BITRATE_MODE",
+            "android.media.MediaFormat#KEY_BIT_RATE",
+            "android.media.MediaFormat#KEY_COLOR_FORMAT",
+            "android.media.MediaFormat#KEY_FRAME_RATE",
+            "android.media.MediaFormat#KEY_I_FRAME_INTERVAL"})
     @Test
     public void testBasic() throws Exception {
         internalTestBasic(mCodecName, mCodecMimeType, mBitRateMode);
     }
 
+    @ApiTest(apis = {"android.media.MediaFormat#KEY_BITRATE_MODE",
+            "android.media.MediaFormat#KEY_BIT_RATE",
+            "android.media.MediaFormat#KEY_COLOR_FORMAT",
+            "android.media.MediaFormat#KEY_FRAME_RATE",
+            "android.media.MediaFormat#KEY_I_FRAME_INTERVAL"})
     @Test
     public void testAsyncEncode() throws Exception {
         internalTestAsyncEncoding(mCodecName, mCodecMimeType, mBitRateMode);
     }
 
+    @ApiTest(apis = "android.media.MediaCodec#PARAMETER_KEY_REQUEST_SYNC_FRAME")
     @Test
     public void testSyncFrame() throws Exception {
         internalTestSyncFrame(mCodecName, mCodecMimeType, mBitRateMode, false);
     }
 
+    @ApiTest(apis = "android.media.MediaCodec#PARAMETER_KEY_REQUEST_SYNC_FRAME")
     @Test
     public void testSyncFrameNdk() throws Exception {
         internalTestSyncFrame(mCodecName, mCodecMimeType, mBitRateMode, true);
     }
 
+    @ApiTest(apis = "android.media.MediaCodec#PARAMETER_KEY_VIDEO_BITRATE")
     @Test
     public void testDynamicBitrateChange() throws Exception {
         internalTestDynamicBitrateChange(mCodecName, mCodecMimeType, mBitRateMode, false);
     }
 
+    @ApiTest(apis = "android.media.MediaCodec#PARAMETER_KEY_VIDEO_BITRATE")
     @Test
     public void testDynamicBitrateChangeNdk() throws Exception {
         internalTestDynamicBitrateChange(mCodecName, mCodecMimeType, mBitRateMode, true);
     }
 
+    @ApiTest(apis = {"android.media.MediaFormat#KEY_BITRATE_MODE",
+            "android.media.MediaFormat#KEY_BIT_RATE",
+            "android.media.MediaFormat#KEY_COLOR_FORMAT",
+            "android.media.MediaFormat#KEY_FRAME_RATE",
+            "android.media.MediaFormat#KEY_I_FRAME_INTERVAL"})
     @Test
     public void testEncoderQuality() throws Exception {
         internalTestEncoderQuality(mCodecName, mCodecMimeType, mBitRateMode);
     }
 
+    @ApiTest(apis = {"android.media.MediaFormat#KEY_BITRATE_MODE",
+            "android.media.MediaFormat#KEY_BIT_RATE",
+            "android.media.MediaFormat#KEY_COLOR_FORMAT",
+            "android.media.MediaFormat#KEY_FRAME_RATE",
+            "android.media.MediaFormat#KEY_I_FRAME_INTERVAL"})
     @Test
     public void testParallelEncodingAndDecoding() throws Exception {
         Assume.assumeTrue("Parallel Encode Decode test is run only for VBR mode",
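The sync-frame and dynamic-bitrate tests above are annotated with the runtime parameter keys they exercise. A minimal sketch of applying those keys to an already configured and started encoder (the variable name encoder is assumed):

    Bundle params = new Bundle();
    // Change the target bitrate on the fly.
    params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, 2_000_000 /* bps */);
    // Ask for the next frame to be a sync frame; the associated value is 0 by convention.
    params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
    encoder.setParameters(params);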
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/VideoDecoderRotationTest.java b/tests/tests/media/codec/src/android/media/codec/cts/VideoDecoderRotationTest.java
index 78021f4..fb61f78 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/VideoDecoderRotationTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/VideoDecoderRotationTest.java
@@ -26,6 +26,7 @@
 import androidx.test.filters.SmallTest;
 import androidx.test.platform.app.InstrumentationRegistry;
 
+import com.android.compatibility.common.util.ApiTest;
 import com.android.compatibility.common.util.NonMainlineTest;
 
 import org.junit.Test;
@@ -96,6 +97,7 @@
         return testParams;
     }
 
+    @ApiTest(apis = "android.media.MediaFormat#KEY_ROTATION")
     @Test
     public void testRendering800x480Rotated() throws Throwable {
         if (mImpl.isConcurrentEncodingDecodingSupported(
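testRendering800x480Rotated is annotated with MediaFormat#KEY_ROTATION. A minimal sketch of what that key expresses when configuring a decoder that renders to a Surface (values are illustrative):

    MediaFormat format = MediaFormat.createVideoFormat(
            MediaFormat.MIMETYPE_VIDEO_AVC, 800 /* width */, 480 /* height */);
    // Rotate decoder output clockwise by 90 degrees when it is rendered to the Surface.
    format.setInteger(MediaFormat.KEY_ROTATION, 90);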
diff --git a/tests/tests/media/codec/src/android/media/codec/cts/VideoEncodingStatisticsTest.java b/tests/tests/media/codec/src/android/media/codec/cts/VideoEncodingStatisticsTest.java
index 4979f76..4239679 100644
--- a/tests/tests/media/codec/src/android/media/codec/cts/VideoEncodingStatisticsTest.java
+++ b/tests/tests/media/codec/src/android/media/codec/cts/VideoEncodingStatisticsTest.java
@@ -26,6 +26,7 @@
 import android.platform.test.annotations.AppModeFull;
 import android.util.Log;
 
+import com.android.compatibility.common.util.ApiTest;
 import com.android.compatibility.common.util.MediaUtils;
 
 import org.junit.Assume;
@@ -195,6 +196,9 @@
         }
     }
 
+    @ApiTest(apis = {"MediaCodecInfo.CodecCapabilities#FEATURE_EncodingStatistics",
+            "android.media.MediaFormat#KEY_VIDEO_ENCODING_STATISTICS_LEVEL",
+            "android.media.MediaFormat#KEY_VIDEO_QP_AVERAGE"})
     @Test
     public void testEncodingStatisticsAvgQp() throws Exception {
        testEncStatRateAvgQp(mCodecName, mCodecMimeType, mBitRateMode);
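testEncodingStatisticsAvgQp exercises the encoding-statistics keys listed in its annotation. A minimal sketch of enabling the feature on an encoder format and where the per-frame average QP would be read back (constant names are the Android 13 MediaFormat ones; buffer handling is elided):

    MediaFormat format = MediaFormat.createVideoFormat(
            MediaFormat.MIMETYPE_VIDEO_AVC, 1280 /* width */, 720 /* height */);
    format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
            MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_1);
    // After dequeueing an output buffer, that frame's format can report the average QP:
    //     int avgQp = encoder.getOutputFormat(outputBufferId)
    //             .getInteger(MediaFormat.KEY_VIDEO_QP_AVERAGE);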
diff --git a/tests/tests/os/src/android/os/storage/cts/StorageManagerHelper.java b/tests/tests/os/src/android/os/storage/cts/StorageManagerHelper.java
index 5c53270..bb747e6 100644
--- a/tests/tests/os/src/android/os/storage/cts/StorageManagerHelper.java
+++ b/tests/tests/os/src/android/os/storage/cts/StorageManagerHelper.java
@@ -16,8 +16,6 @@
 
 package android.os.storage.cts;
 
-import android.content.Context;
-import android.content.pm.PackageManager;
 import android.os.ParcelFileDescriptor;
 import android.util.Log;
 
@@ -125,23 +123,6 @@
         return null;
     }
 
-    public static boolean isAdoptableStorageSupported(Context context) throws Exception {
-        return hasAdoptableStorageFeature(context) || hasAdoptableStorageFstab();
-    }
-
-    private static boolean hasAdoptableStorageFstab() throws Exception {
-        List<String> hasAdoptable = executeShellCommand("sm has-adoptable");
-        if (hasAdoptable.isEmpty()) {
-            return false;
-        }
-        return Boolean.parseBoolean(hasAdoptable.get(0).trim());
-    }
-
-    private static boolean hasAdoptableStorageFeature(Context context) throws Exception {
-        return context.getPackageManager().hasSystemFeature(
-                PackageManager.FEATURE_ADOPTABLE_STORAGE);
-    }
-
     private static List<String> executeShellCommand(String command) throws Exception {
         final ParcelFileDescriptor pfd = InstrumentationRegistry.getInstrumentation()
                 .getUiAutomation().executeShellCommand(command);
diff --git a/tests/tests/os/src/android/os/storage/cts/StorageManagerTest.java b/tests/tests/os/src/android/os/storage/cts/StorageManagerTest.java
index a54dfd5..af72be9 100644
--- a/tests/tests/os/src/android/os/storage/cts/StorageManagerTest.java
+++ b/tests/tests/os/src/android/os/storage/cts/StorageManagerTest.java
@@ -18,13 +18,6 @@
 
 import static com.google.common.truth.Truth.assertThat;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 import static org.testng.Assert.assertThrows;
@@ -32,7 +25,6 @@
 import static java.util.stream.Collectors.joining;
 
 import android.app.PendingIntent;
-import android.content.Context;
 import android.content.res.Resources;
 import android.content.res.Resources.NotFoundException;
 import android.os.Environment;
@@ -53,18 +45,16 @@
 import android.system.ErrnoException;
 import android.system.Os;
 import android.system.OsConstants;
+import android.test.AndroidTestCase;
 import android.test.ComparisonFailure;
 import android.util.Log;
 
-import androidx.test.ext.junit.runners.AndroidJUnit4;
 import androidx.test.platform.app.InstrumentationRegistry;
 
 import com.android.compatibility.common.util.FileUtils;
 import com.android.compatibility.common.util.SystemUtil;
 
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import junit.framework.AssertionFailedError;
 
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -86,8 +76,7 @@
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
-@RunWith(AndroidJUnit4.class)
-public class StorageManagerTest {
+public class StorageManagerTest extends AndroidTestCase {
 
     private static final String TAG = StorageManager.class.getSimpleName();
 
@@ -97,17 +86,15 @@
     private static final String OBB_MOUNT_PREFIX = "/mnt/obb/";
     private static final String TEST1_NEW_CONTENTS = "1\n";
 
-    private Context mContext;
     private StorageManager mStorageManager;
     private final Handler mHandler = new Handler(Looper.getMainLooper());
 
-    @Before
-    public void setUp() throws Exception {
-        mContext = InstrumentationRegistry.getInstrumentation().getContext();
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
         mStorageManager = mContext.getSystemService(StorageManager.class);
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testMountAndUnmountObbNormal() throws IOException {
         for (File target : getTargetFiles()) {
@@ -135,7 +122,6 @@
         }
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testAttemptMountNonObb() {
         for (File target : getTargetFiles()) {
@@ -160,7 +146,6 @@
                 mStorageManager.getMountedObbPath(outFile.getPath()));
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testAttemptMountObbWrongPackage() {
         for (File target : getTargetFiles()) {
@@ -181,7 +166,6 @@
                 mStorageManager.getMountedObbPath(outFile.getPath()));
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testMountAndUnmountTwoObbs() throws IOException {
         for (File target : getTargetFiles()) {
@@ -221,7 +205,6 @@
         }
     }
 
-    @Test
     public void testGetPrimaryVolume() throws Exception {
         final StorageVolume volume = mStorageManager.getPrimaryStorageVolume();
         assertNotNull("Did not get primary storage", volume);
@@ -273,7 +256,6 @@
         }
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testGetStorageVolumes() throws Exception {
         final List<StorageVolume> volumes = mStorageManager.getStorageVolumes();
@@ -289,7 +271,6 @@
         assertStorageVolumesEquals(primary, mStorageManager.getPrimaryStorageVolume());
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testGetRecentStorageVolumes() throws Exception {
         // At a minimum recent volumes should include current volumes
@@ -300,7 +281,6 @@
         assertTrue(recentNames.containsAll(currentNames));
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testGetStorageVolume() throws Exception {
         assertNull("Should not get volume for null path",
@@ -324,11 +304,8 @@
         assertStorageVolumesEquals(primary, childVolume);
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testGetStorageVolumeUSB() throws Exception {
-        assumeTrue(StorageManagerHelper.isAdoptableStorageSupported(mContext));
-
         String volumeName = StorageManagerHelper.createUSBVirtualDisk();
         Log.d(TAG, "testGetStorageVolumeUSB#volumeName: " + volumeName);
         List<StorageVolume> storageVolumes = mStorageManager.getStorageVolumes();
@@ -341,11 +318,8 @@
                 .collect(joining("\n")), usbStorageVolume.isPresent());
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testGetStorageVolumeSDCard() throws Exception {
-        assumeTrue(StorageManagerHelper.isAdoptableStorageSupported(mContext));
-
         String volumeName = StorageManagerHelper.createSDCardVirtualDisk();
         Log.d(TAG, "testGetStorageVolumeSDCard#volumeName: " + volumeName);
         List<StorageVolume> storageVolumes = mStorageManager.getStorageVolumes();
@@ -366,7 +340,6 @@
         }
     }
 
-    @Test
     public void testGetUuidForPath() throws Exception {
         assertEquals(StorageManager.UUID_DEFAULT,
                 mStorageManager.getUuidForPath(Environment.getDataDirectory()));
@@ -377,7 +350,6 @@
         assertNoUuid(new File("/proc/"));
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testGetExternalUuidForPath() throws Exception {
         final UUID extUuid = mStorageManager
@@ -390,7 +362,6 @@
         assertEquals(extUuid, mStorageManager.getUuidForPath(new File("/sdcard/")));
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot access external storage")
     public void testCallback() throws Exception {
         final CountDownLatch mounted = new CountDownLatch(1);
@@ -485,7 +456,6 @@
         }
     }
 
-    @Test
     public void testOpenProxyFileDescriptor() throws Exception {
         final TestProxyFileDescriptorCallback appleCallback =
                 new TestProxyFileDescriptorCallback(1024 * 1024, "Apple");
@@ -608,7 +578,7 @@
                 assertEquals(1, orangeCallback.releaseCount);
                 assertEquals(1, cherryCallback.releaseCount);
                 break;
-            } catch (AssertionError error) {
+            } catch (AssertionFailedError error) {
                 if (retry-- > 0) {
                    Thread.sleep(500);
                    continue;
@@ -619,7 +589,6 @@
         }
     }
 
-    @Test
     public void testOpenProxyFileDescriptor_error() throws Exception {
         final TestProxyFileDescriptorCallback callback =
                 new TestProxyFileDescriptorCallback(1024 * 1024, "Error");
@@ -681,7 +650,6 @@
         }
     }
 
-    @Test
     public void testOpenProxyFileDescriptor_async() throws Exception {
         final CountDownLatch blockReadLatch = new CountDownLatch(1);
         final CountDownLatch readBlockedLatch = new CountDownLatch(1);
@@ -759,7 +727,6 @@
         looperThread.join();
     }
 
-    @Test
     public void testOpenProxyFileDescriptor_largeFile() throws Exception {
         final ProxyFileDescriptorCallback callback = new ProxyFileDescriptorCallback() {
             @Override
@@ -792,7 +759,6 @@
         }
     }
 
-    @Test
     public void testOpenProxyFileDescriptor_largeRead() throws Exception {
         final int SIZE = 1024 * 1024;
         final TestProxyFileDescriptorCallback callback =
@@ -809,7 +775,6 @@
         }
     }
 
-    @Test
     public void testOpenProxyFileDescriptor_largeWrite() throws Exception {
         final int SIZE = 1024 * 1024;
         final TestProxyFileDescriptorCallback callback =
@@ -829,7 +794,6 @@
         }
     }
 
-    @Test
     public void testIsAllocationSupported() throws Exception {
         FileDescriptor good = Os.open(
             File.createTempFile("StorageManagerTest", "").getAbsolutePath(),
@@ -849,7 +813,6 @@
         }
     }
 
-    @Test
     public void testFatUuidHandling() throws Exception {
         assertEquals(UUID.fromString("fafafafa-fafa-5afa-8afa-fafa01234567"),
                 StorageManager.convert("0123-4567"));
@@ -874,7 +837,6 @@
                 StorageManager.convert(UUID.fromString("fafafafa-fafa-5afa-8afa-fafadeadbeef")));
     }
 
-    @Test
     @AppModeFull(reason = "Instant apps cannot hold MANAGE_EXTERNAL_STORAGE permission")
     public void testGetManageSpaceActivityIntent() throws Exception {
         String packageName = "android.os.cts";
@@ -1078,7 +1040,6 @@
         }
     }
 
-    @Test
     public void testComputeStorageCacheBytes() throws Exception {
         File mockFile = mock(File.class);
 
diff --git a/tests/tests/permission3/CreateNotificationChannelsApp31/src/android/permission3/cts/usepermission/CreateNotificationChannelsActivity.kt b/tests/tests/permission3/CreateNotificationChannelsApp31/src/android/permission3/cts/usepermission/CreateNotificationChannelsActivity.kt
index 104655f..3c8a35c 100644
--- a/tests/tests/permission3/CreateNotificationChannelsApp31/src/android/permission3/cts/usepermission/CreateNotificationChannelsActivity.kt
+++ b/tests/tests/permission3/CreateNotificationChannelsApp31/src/android/permission3/cts/usepermission/CreateNotificationChannelsActivity.kt
@@ -22,6 +22,7 @@
 import android.app.NotificationManager
 import android.content.Intent
 import android.content.pm.PackageManager
+import android.os.Bundle
 import android.os.Handler
 import android.os.Looper
 
@@ -43,9 +44,13 @@
     lateinit var notificationManager: NotificationManager
     var launchActivityOnSecondResume = false
     var isFirstResume = true
+    var windowHasFocus = false
+    var pendingCreateChannel = false
     val handler = Handler(Looper.getMainLooper())
 
-    override fun onStart() {
+    override fun onCreate(savedInstanceState: Bundle?) {
+        super.onCreate(savedInstanceState)
+
         val launchSecondActivity = intent.getBooleanExtra(EXTRA_START_SECOND_ACTIVITY, false)
         notificationManager = baseContext.getSystemService(NotificationManager::class.java)!!
         if (intent.getBooleanExtra(EXTRA_START_SECOND_APP, false)) {
@@ -73,7 +78,6 @@
             }
         }
 
-
         if (intent.getBooleanExtra(EXTRA_REQUEST_OTHER_PERMISSIONS, false)) {
             requestPermissions(arrayOf(Manifest.permission.RECORD_AUDIO), 0)
         } else if (intent.getBooleanExtra(EXTRA_REQUEST_OTHER_PERMISSIONS_DELAYED, false)) {
@@ -85,8 +89,6 @@
         if (intent.getBooleanExtra(EXTRA_REQUEST_NOTIF_PERMISSION, false)) {
             requestPermissions(arrayOf(Manifest.permission.POST_NOTIFICATIONS), 0)
         }
-
-        super.onStart()
     }
 
     private fun launchSecondActivity() {
@@ -99,7 +101,21 @@
                             }, LONG_DELAY_MS)
     }
 
+    override fun onWindowFocusChanged(hasFocus: Boolean) {
+        windowHasFocus = hasFocus
+        if (windowHasFocus && pendingCreateChannel) {
+            pendingCreateChannel = false
+            createChannel()
+        }
+    }
+
     private fun createChannel() {
+        // Wait until window has focus so the permission prompt can be displayed
+        if (!windowHasFocus) {
+            pendingCreateChannel = true
+            return
+        }
+
         if (notificationManager.getNotificationChannel(CHANNEL_ID_31) == null) {
             notificationManager.createNotificationChannel(NotificationChannel(CHANNEL_ID_31,
                 "Foreground Services", NotificationManager.IMPORTANCE_HIGH))
diff --git a/tests/tests/permission3/src/android/permission3/cts/NotificationPermissionTest.kt b/tests/tests/permission3/src/android/permission3/cts/NotificationPermissionTest.kt
index d55e3a4..dee76c2 100644
--- a/tests/tests/permission3/src/android/permission3/cts/NotificationPermissionTest.kt
+++ b/tests/tests/permission3/src/android/permission3/cts/NotificationPermissionTest.kt
@@ -194,6 +194,7 @@
     fun notificationPromptShownForSubsequentStartsIfTaskStartWasLauncher() {
         installPackage(APP_APK_PATH_CREATE_NOTIFICATION_CHANNELS_31, expectSuccess = true)
         launchApp(startSecondActivity = true)
+        waitFindObject(By.res(ALLOW_BUTTON))
         pressBack()
         clickPermissionRequestAllowButton()
     }
diff --git a/tests/tests/security/src/android/security/cts/PackageSignatureTest.java b/tests/tests/security/src/android/security/cts/PackageSignatureTest.java
index 4bfe8fe..b6ee6ed 100644
--- a/tests/tests/security/src/android/security/cts/PackageSignatureTest.java
+++ b/tests/tests/security/src/android/security/cts/PackageSignatureTest.java
@@ -102,7 +102,7 @@
         wellKnownSignatures.add(getSignature(R.raw.sig_com_google_android_runtime_debug));
         wellKnownSignatures.add(getSignature(R.raw.sig_com_google_android_runtime_release));
         wellKnownSignatures.add(getSignature(R.raw.sig_com_google_android_tzdata3));
-        // The following keys are not not used by modules on the latest Android release, but it
+        // The following keys are not used by modules on the latest Android release, but it
         // won't negatively affect tests to include their signatures here too.
         wellKnownSignatures.add(getSignature(R.raw.sig_com_google_android_tzdata));
         wellKnownSignatures.add(getSignature(R.raw.sig_com_google_android_tzdata2));
diff --git a/tests/tests/vcn/src/android/net/vcn/cts/VcnManagerTest.java b/tests/tests/vcn/src/android/net/vcn/cts/VcnManagerTest.java
index 6051a5c..8de86d3 100644
--- a/tests/tests/vcn/src/android/net/vcn/cts/VcnManagerTest.java
+++ b/tests/tests/vcn/src/android/net/vcn/cts/VcnManagerTest.java
@@ -519,6 +519,128 @@
         }
     }
 
+    private static class NetworkSelectionTestNetworkParams {
+        public final boolean isMetered;
+
+        NetworkSelectionTestNetworkParams(boolean isMetered) {
+            this.isMetered = isMetered;
+        }
+    }
+
+    private TestNetworkWrapper createTestNetworkForNetworkSelection(
+            int subId, NetworkSelectionTestNetworkParams params) throws Exception {
+        return createTestNetworkWrapper(params.isMetered, subId, LOCAL_ADDRESS);
+    }
+
+    private void verifyVcnMigratesToPreferredUnderlyingNetwork(
+            VcnConfig vcnConfig,
+            NetworkSelectionTestNetworkParams lessPreferred,
+            NetworkSelectionTestNetworkParams preferred)
+            throws Exception {
+        final int subId = verifyAndGetValidDataSubId();
+
+        // Start on a less preferred network.
+        try (TestNetworkWrapper testNetworkWrapperLessPreferred =
+                createTestNetworkForNetworkSelection(subId, lessPreferred)) {
+            verifyUnderlyingCellAndRunTest(
+                    subId,
+                    (subGrp, cellNetwork, cellNetworkCb) -> {
+                        final VcnSetupResult vcnSetupResult =
+                                setupAndGetVcnNetwork(
+                                        subGrp,
+                                        cellNetwork,
+                                        cellNetworkCb,
+                                        vcnConfig,
+                                        testNetworkWrapperLessPreferred);
+
+                        // Then bring up a more preferred network, and expect to switch to it.
+                        try (TestNetworkWrapper testNetworkWrapperPreferred =
+                                createTestNetworkForNetworkSelection(subId, preferred)) {
+                            injectAndVerifyIkeMobikePackets(
+                                    testNetworkWrapperPreferred.ikeTunUtils);
+
+                            clearVcnConfigsAndVerifyNetworkTeardown(
+                                    subGrp, cellNetworkCb, vcnSetupResult.vcnNetwork);
+                        }
+                    });
+        }
+    }
+
+    private void verifyVcnDoesNotSelectLessPreferredUnderlyingNetwork(
+            VcnConfig vcnConfig,
+            NetworkSelectionTestNetworkParams lessPreferred,
+            NetworkSelectionTestNetworkParams preferred)
+            throws Exception {
+        final int subId = verifyAndGetValidDataSubId();
+
+        // Start on a more preferred network.
+        try (TestNetworkWrapper testNetworkWrapperPreferred =
+                createTestNetworkForNetworkSelection(subId, preferred)) {
+            verifyUnderlyingCellAndRunTest(
+                    subId,
+                    (subGrp, cellNetwork, cellNetworkCb) -> {
+                        final VcnSetupResult vcnSetupResult =
+                                setupAndGetVcnNetwork(
+                                        subGrp,
+                                        cellNetwork,
+                                        cellNetworkCb,
+                                        vcnConfig,
+                                        testNetworkWrapperPreferred);
+
+                        // Then bring up a less preferred network, and expect that the VCN's
+                        // underlying network does not change.
+                        try (TestNetworkWrapper testNetworkWrapperLessPreferred =
+                                createTestNetworkForNetworkSelection(subId, lessPreferred)) {
+                            injectAndVerifyIkeDpdPackets(
+                                    testNetworkWrapperPreferred.ikeTunUtils,
+                                    vcnSetupResult.ikeExchangePortPair);
+
+                            clearVcnConfigsAndVerifyNetworkTeardown(
+                                    subGrp, cellNetworkCb, vcnSetupResult.vcnNetwork);
+                        }
+                    });
+        }
+    }
+
+    private void verifyVcnMigratesAfterPreferredUnderlyingNetworkDies(
+            VcnConfig vcnConfig,
+            NetworkSelectionTestNetworkParams lessPreferred,
+            NetworkSelectionTestNetworkParams preferred)
+            throws Exception {
+        final int subId = verifyAndGetValidDataSubId();
+
+        // Start on a more preferred network
+        try (TestNetworkWrapper testNetworkWrapperPreferred =
+                createTestNetworkForNetworkSelection(subId, preferred)) {
+            verifyUnderlyingCellAndRunTest(
+                    subId,
+                    (subGrp, cellNetwork, cellNetworkCb) -> {
+                        final VcnSetupResult vcnSetupResult =
+                                setupAndGetVcnNetwork(
+                                        subGrp,
+                                        cellNetwork,
+                                        cellNetworkCb,
+                                        vcnConfig,
+                                        testNetworkWrapperPreferred);
+
+                        // Bring up a less preferred network
+                        try (TestNetworkWrapper testNetworkWrapperLessPreferred =
+                                createTestNetworkForNetworkSelection(subId, lessPreferred)) {
+                            // Tear down the preferred network
+                            testNetworkWrapperPreferred.close();
+                            testNetworkWrapperPreferred.vcnNetworkCallback.waitForLost();
+
+                            // Verify the VCN switches to the remaining less preferred network
+                            injectAndVerifyIkeMobikePackets(
+                                    testNetworkWrapperLessPreferred.ikeTunUtils);
+
+                            clearVcnConfigsAndVerifyNetworkTeardown(
+                                    subGrp, cellNetworkCb, vcnSetupResult.vcnNetwork);
+                        }
+                    });
+        }
+    }
+
     private VcnConfig createVcnConfigPrefersMetered() throws Exception {
         final List<VcnUnderlyingNetworkTemplate> nwTemplates = new ArrayList<>();
         nwTemplates.add(
@@ -529,98 +651,29 @@
     }
 
     @Test
-    public void testVcnMigratesToPreferredUnderlyingNetwork() throws Exception {
-        final int subId = verifyAndGetValidDataSubId();
-        final VcnConfig vcnConfig = createVcnConfigPrefersMetered();
-
-        // Start on NOT_METERED, less preferred network.
-        try (TestNetworkWrapper testNetworkWrapperNotMetered =
-                createTestNetworkWrapper(false /* isMetered */, subId, LOCAL_ADDRESS)) {
-            verifyUnderlyingCellAndRunTest(subId, (subGrp, cellNetwork, cellNetworkCb) -> {
-                final VcnSetupResult vcnSetupResult =
-                    setupAndGetVcnNetwork(
-                        subGrp,
-                        cellNetwork,
-                        cellNetworkCb,
-                        vcnConfig,
-                        testNetworkWrapperNotMetered);
-
-                // Then bring up a more preferred network, and expect to switch to it.
-                try (TestNetworkWrapper testNetworkWrapperMetered =
-                        createTestNetworkWrapper(true /* isMetered */, subId, LOCAL_ADDRESS)) {
-                    injectAndVerifyIkeMobikePackets(testNetworkWrapperMetered.ikeTunUtils);
-
-                    clearVcnConfigsAndVerifyNetworkTeardown(
-                            subGrp, cellNetworkCb, vcnSetupResult.vcnNetwork);
-                }
-            });
-        }
+    public void testVcnMigratesToPreferredUnderlyingNetwork_preferMetered() throws Exception {
+        verifyVcnMigratesToPreferredUnderlyingNetwork(
+                createVcnConfigPrefersMetered(),
+                new NetworkSelectionTestNetworkParams(false /* isMetered */),
+                new NetworkSelectionTestNetworkParams(true /* isMetered */));
     }
 
     @Test
-    public void testVcnDoesNotSelectLessPreferredUnderlyingNetwork() throws Exception {
-        final int subId = verifyAndGetValidDataSubId();
-        final VcnConfig vcnConfig = createVcnConfigPrefersMetered();
-
-        // Start on METERED, more preferred network
-        try (TestNetworkWrapper testNetworkWrapperMetered =
-                createTestNetworkWrapper(true /* isMetered */, subId, LOCAL_ADDRESS)) {
-            verifyUnderlyingCellAndRunTest(subId, (subGrp, cellNetwork, cellNetworkCb) -> {
-                final VcnSetupResult vcnSetupResult =
-                        setupAndGetVcnNetwork(
-                                subGrp,
-                                cellNetwork,
-                                cellNetworkCb,
-                                vcnConfig,
-                                testNetworkWrapperMetered);
-
-                // Then bring up a less preferred network, and expect the VCN underlying
-                // network does not change.
-                try (TestNetworkWrapper testNetworkWrapperNotMetered =
-                        createTestNetworkWrapper(false /* isMetered */, subId, LOCAL_ADDRESS)) {
-                    injectAndVerifyIkeDpdPackets(
-                            testNetworkWrapperMetered.ikeTunUtils,
-                            vcnSetupResult.ikeExchangePortPair);
-
-                    clearVcnConfigsAndVerifyNetworkTeardown(
-                            subGrp, cellNetworkCb, vcnSetupResult.vcnNetwork);
-                }
-            });
-        }
+    public void testVcnDoesNotSelectLessPreferredUnderlyingNetwork_preferMetered()
+            throws Exception {
+        verifyVcnDoesNotSelectLessPreferredUnderlyingNetwork(
+                createVcnConfigPrefersMetered(),
+                new NetworkSelectionTestNetworkParams(false /* isMetered */),
+                new NetworkSelectionTestNetworkParams(true /* isMetered */));
     }
 
     @Test
-    public void testVcnMigratesAfterPreferredUnderlyingNetworkDies() throws Exception {
-        final int subId = verifyAndGetValidDataSubId();
-        final VcnConfig vcnConfig = createVcnConfigPrefersMetered();
-
-        // Start on METERED, more preferred network
-        try (TestNetworkWrapper testNetworkWrapperMetered =
-                createTestNetworkWrapper(true /* isMetered */, subId, LOCAL_ADDRESS)) {
-            verifyUnderlyingCellAndRunTest(subId, (subGrp, cellNetwork, cellNetworkCb) -> {
-                final VcnSetupResult vcnSetupResult =
-                        setupAndGetVcnNetwork(
-                                subGrp,
-                                cellNetwork,
-                                cellNetworkCb,
-                                vcnConfig,
-                                testNetworkWrapperMetered);
-
-                // Bring up a NOT_METERED, less preferred network
-                try (TestNetworkWrapper testNetworkWrapperNotMetered =
-                        createTestNetworkWrapper(false /* isMetered */, subId, LOCAL_ADDRESS)) {
-                    // Teardown the preferred network
-                    testNetworkWrapperMetered.close();
-                    testNetworkWrapperMetered.vcnNetworkCallback.waitForLost();
-
-                    // Verify the VCN switches to the remaining NOT_METERED network
-                    injectAndVerifyIkeMobikePackets(testNetworkWrapperNotMetered.ikeTunUtils);
-
-                    clearVcnConfigsAndVerifyNetworkTeardown(
-                            subGrp, cellNetworkCb, vcnSetupResult.vcnNetwork);
-                }
-            });
-        }
+    public void testVcnMigratesAfterPreferredUnderlyingNetworkDies_preferMetered()
+            throws Exception {
+        verifyVcnMigratesAfterPreferredUnderlyingNetworkDies(
+                createVcnConfigPrefersMetered(),
+                new NetworkSelectionTestNetworkParams(false /* isMetered */),
+                new NetworkSelectionTestNetworkParams(true /* isMetered */));
     }
 
     @Test
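The three *_preferMetered tests above now delegate to the extracted verify* helpers, each taking a less-preferred and a preferred NetworkSelectionTestNetworkParams. A sketch of how another preference ordering could reuse the same helpers; createVcnConfigPrefersNotMetered() is hypothetical and is not part of this change:

    @Test
    public void testVcnMigratesToPreferredUnderlyingNetwork_preferNotMetered() throws Exception {
        verifyVcnMigratesToPreferredUnderlyingNetwork(
                createVcnConfigPrefersNotMetered() /* hypothetical config helper */,
                new NetworkSelectionTestNetworkParams(true /* isMetered */),   // less preferred
                new NetworkSelectionTestNetworkParams(false /* isMetered */)); // preferred
    }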
diff --git a/tests/tests/view/src/android/view/cts/PixelCopyViewProducerActivity.java b/tests/tests/view/src/android/view/cts/PixelCopyViewProducerActivity.java
index c89cdb8..9cff62b 100644
--- a/tests/tests/view/src/android/view/cts/PixelCopyViewProducerActivity.java
+++ b/tests/tests/view/src/android/view/cts/PixelCopyViewProducerActivity.java
@@ -72,6 +72,9 @@
         view.setSystemUiVisibility(View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
         mContent.getViewTreeObserver().addOnDrawListener(this);
         mContent.setOnApplyWindowInsetsListener(this);
+
+        // Setting decorFitsSystemWindows to false makes the window ignore the display cutout
+        getWindow().setDecorFitsSystemWindows(false);
     }
 
     @Override
diff --git a/tests/tests/view/surfacevalidator/src/android/view/cts/surfacevalidator/ASurfaceControlTestActivity.java b/tests/tests/view/surfacevalidator/src/android/view/cts/surfacevalidator/ASurfaceControlTestActivity.java
index d1e388b..08c2231 100644
--- a/tests/tests/view/surfacevalidator/src/android/view/cts/surfacevalidator/ASurfaceControlTestActivity.java
+++ b/tests/tests/view/surfacevalidator/src/android/view/cts/surfacevalidator/ASurfaceControlTestActivity.java
@@ -97,6 +97,7 @@
         decorView.setPointerIcon(
                 PointerIcon.getSystemIcon(this, PointerIcon.TYPE_NULL));
         getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+        getWindow().setDecorFitsSystemWindows(false);
 
         mLayoutParams = new FrameLayout.LayoutParams(DEFAULT_LAYOUT_WIDTH, DEFAULT_LAYOUT_HEIGHT,
                 Gravity.LEFT | Gravity.TOP);
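Both view-test activities above now call Window#setDecorFitsSystemWindows(false) during setup. A minimal sketch of that call in isolation (the surrounding onCreate is illustrative):

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Lay the content out edge to edge instead of fitting it inside the
        // system bar / display cutout insets supplied by the decor.
        getWindow().setDecorFitsSystemWindows(false);
    }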