Merge "Merge "C2 VTS: only InputBufferTest can be skipped for secure codecs" into android12-tests-dev am: f00bec8b65" into android12L-tests-dev am: b8e510be2e

Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/1936576

Change-Id: I326ece4f6103e3237bc25b8848b0c1aab7896227
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/.clang-format b/.clang-format
new file mode 100644
index 0000000..a7614d2
--- /dev/null
+++ b/.clang-format
@@ -0,0 +1,13 @@
+BasedOnStyle: Google
+Standard: Cpp11
+AccessModifierOffset: -2
+AllowShortFunctionsOnASingleLine: Inline
+ColumnLimit: 100
+CommentPragmas: NOLINT:.*
+DerivePointerAlignment: false
+IncludeBlocks: Preserve
+IndentWidth: 4
+ContinuationIndentWidth: 8
+PointerAlignment: Left
+TabWidth: 4
+UseTab: Never
diff --git a/Android.bp b/Android.bp
index 60f0ff1..ee609e1 100644
--- a/Android.bp
+++ b/Android.bp
@@ -57,7 +57,7 @@
             min_sdk_version: "29",
             apex_available: [
                 "//apex_available:platform",
-                "com.android.bluetooth.updatable",
+                "com.android.bluetooth",
                 "com.android.media",
                 "com.android.media.swcodec",
             ],
@@ -86,7 +86,7 @@
     min_sdk_version: "29",
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth.updatable",
+        "com.android.bluetooth",
         "com.android.media",
         "com.android.media.swcodec",
     ],
diff --git a/METADATA b/METADATA
index aabda36..146bfcb 100644
--- a/METADATA
+++ b/METADATA
@@ -2,22 +2,22 @@
 #     CONSULT THE OWNERS AND opensource-licensing@google.com BEFORE
 #     DEPENDING ON IT IN YOUR PROJECT. ***
 third_party {
-  # would be NOTICE save for Widevine Master License Agreement in:
-  #   drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
-  #   drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
-  #   drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
-  #   drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto
-  #   drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
-  # and patent disclaimers in:
-  #   media/codec2/components/aac/patent_disclaimer.txt
-  #   media/codec2/components/amr_nb_wb/patent_disclaimer.txt
-  #   media/codec2/components/mp3/patent_disclaimer.txt
-  #   media/codec2/components/mpeg4_h263/patent_disclaimer.txt
-  #   media/codecs/amrnb/patent_disclaimer.txt
-  #   media/codecs/amrwb/dec/patent_disclaimer.txt
-  #   media/codecs/amrwb/enc/patent_disclaimer.txt
-  #   media/codecs/m4v_h263/patent_disclaimer.txt
-  #   media/codecs/mp3dec/patent_disclaimer.txt
-  #   media/libstagefright/codecs/aacenc/patent_disclaimer.txt
+  license_note: "would be NOTICE save for Widevine Master License Agreement in:\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto\n"
+  "   drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h\n"
+  " and patent disclaimers in:\n"
+  "   media/codec2/components/aac/patent_disclaimer.txt\n"
+  "   media/codec2/components/amr_nb_wb/patent_disclaimer.txt\n"
+  "   media/codec2/components/mp3/patent_disclaimer.txt\n"
+  "   media/codec2/components/mpeg4_h263/patent_disclaimer.txt\n"
+  "   media/codecs/amrnb/patent_disclaimer.txt\n"
+  "   media/codecs/amrwb/dec/patent_disclaimer.txt\n"
+  "   media/codecs/amrwb/enc/patent_disclaimer.txt\n"
+  "   media/codecs/m4v_h263/patent_disclaimer.txt\n"
+  "   media/codecs/mp3dec/patent_disclaimer.txt\n"
+  "   media/libstagefright/codecs/aacenc/patent_disclaimer.txt"
   license_type: BY_EXCEPTION_ONLY
 }
diff --git a/OWNERS b/OWNERS
index 0be1196..40c65e7 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,7 +1,6 @@
 # Bug component: 1344
 elaurent@google.com
 etalvala@google.com
-hkuang@google.com
 lajos@google.com
 
 # go/android-fwk-media-solutions for info on areas of ownership.
diff --git a/apex/Android.bp b/apex/Android.bp
index b9abd12..b9b9bde 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -23,7 +23,6 @@
 
 apex_defaults {
     name: "com.android.media-defaults",
-    updatable: true,
     bootclasspath_fragments: ["com.android.media-bootclasspath-fragment"],
     systemserverclasspath_fragments: ["com.android.media-systemserverclasspath-fragment"],
     multilib: {
@@ -67,14 +66,13 @@
     // Use a custom AndroidManifest.xml used for API targeting.
     androidManifest: ":com.android.media-androidManifest",
 
-    // IMPORTANT: For the APEX to be installed on Android 10 (API 29),
-    // min_sdk_version should be 29. This enables the build system to make
+    // IMPORTANT: q-launched-apex-module enables the build system to make
     // sure the package compatible to Android 10 in two ways:
     // - build the APEX package compatible to Android 10
     //   so that the package can be installed.
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
-    min_sdk_version: "29",
+    defaults: ["q-launched-apex-module"],
     // Indicates that pre-installed version of this apex can be compressed.
     // Whether it actually will be compressed is controlled on per-device basis.
     compressible: true,
@@ -126,6 +124,26 @@
     // modified by the Soong or platform compat team.
     hidden_api: {
         max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+
+        // The following packages contain classes from other modules on the
+        // bootclasspath. That means that the hidden API flags for this module
+        // have to explicitly list every single class this module provides in
+        // that package to differentiate them from the classes provided by other
+        // modules. That can include private classes that are not part of the
+        // API.
+        split_packages: [
+            "android.media",
+        ],
+
+        // The following packages and all their subpackages currently only
+        // contain classes from this bootclasspath_fragment. Listing a package
+        // here won't prevent other bootclasspath modules from adding classes in
+        // any of those packages but it will prevent them from adding those
+        // classes into an API surface, e.g. public, system, etc.. Doing so will
+        // result in a build failure due to inconsistent flags.
+        package_prefixes: [
+            "android.media.internal",
+        ],
     },
 }
 
@@ -148,7 +166,6 @@
 
 apex_defaults {
     name: "com.android.media.swcodec-defaults",
-    updatable: true,
     binaries: [
         "mediaswcodec",
     ],
@@ -172,14 +189,13 @@
     // Use a custom AndroidManifest.xml used for API targeting.
     androidManifest: ":com.android.media.swcodec-androidManifest",
 
-    // IMPORTANT: For the APEX to be installed on Android 10 (API 29),
-    // min_sdk_version should be 29. This enables the build system to make
+    // IMPORTANT: q-launched-apex-module enables the build system to make
     // sure the package compatible to Android 10 in two ways:
     // - build the APEX package compatible to Android 10
     //   so that the package can be installed.
     // - build artifacts (lib/javalib/bin) against Android 10 SDK
     //   so that the artifacts can run.
-    min_sdk_version: "29",
+    defaults: ["q-launched-apex-module"],
     // Indicates that pre-installed version of this apex can be compressed.
     // Whether it actually will be compressed is controlled on per-device basis.
     compressible: true,
diff --git a/apex/OWNERS b/apex/OWNERS
index 5587f5f..54802d4 100644
--- a/apex/OWNERS
+++ b/apex/OWNERS
@@ -1,6 +1,7 @@
-chz@google.com
-dwkang@google.com
+essick@google.com
 jiyong@google.com
 lajos@google.com
-marcone@google.com
-wjia@google.com
+nchalko@google.com
+
+include platform/packages/modules/common:/MODULES_OWNERS
+
diff --git a/apex/mediaswcodec.32rc b/apex/mediaswcodec.32rc
new file mode 100644
index 0000000..79aef36
--- /dev/null
+++ b/apex/mediaswcodec.32rc
@@ -0,0 +1,6 @@
+service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
+    class main
+    user mediacodec
+    group camera drmrpc mediadrm
+    ioprio rt 4
+    task_profiles ProcessCapacityHigh
diff --git a/apex/mediatranscoding.32rc b/apex/mediatranscoding.32rc
new file mode 100644
index 0000000..5169462
--- /dev/null
+++ b/apex/mediatranscoding.32rc
@@ -0,0 +1,12 @@
+# media.transcoding service is defined on com.android.media apex which goes back
+# to API29, but we only want it started on API31+ devices. So we declare it as
+# "disabled" and start it explicitly on boot.
+service media.transcoding /apex/com.android.media/bin/mediatranscoding
+    class main
+    user media
+    group media
+    ioprio rt 4
+    # Restrict to little cores only with system-background cpuset.
+    task_profiles ServiceCapacityLow
+    interface aidl media.transcoding
+    disabled
diff --git a/camera/Android.bp b/camera/Android.bp
index 6878c20..4ed3269 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -43,6 +43,10 @@
     ],
 }
 
+cc_library_headers {
+    name: "camera_headers",
+    export_include_dirs: ["include"],
+}
 cc_library_shared {
     name: "libcamera_client",
 
diff --git a/camera/OWNERS b/camera/OWNERS
index 1b548e4..2a1d523 100644
--- a/camera/OWNERS
+++ b/camera/OWNERS
@@ -1,3 +1,4 @@
+
 # Bug component: 41727
 etalvala@google.com
 arakesh@google.com
@@ -5,4 +6,3 @@
 jchowdhary@google.com
 shuzhenwang@google.com
 ruchamk@google.com
-
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index 24fa912..b37803a 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -760,7 +760,7 @@
     Mutex::Autolock al(sLock);
     if (sGlobalVendorTagDescriptorCache == NULL) {
         ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__);
-        return VENDOR_TAG_NAME_ERR;
+        return VENDOR_TAG_TYPE_ERR;
     }
     return sGlobalVendorTagDescriptorCache->getTagType(tag, id);
 }
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index ccbfaa9..da887a2 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -139,6 +139,8 @@
             return !(*this == other);
         }
         bool operator < (const Callback& other) const {
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
             if (*this == other) return false;
             if (mContext != other.mContext) return mContext < other.mContext;
             if (mPhysicalCamAvailable != other.mPhysicalCamAvailable) {
@@ -152,6 +154,7 @@
             }
             if (mAvailable != other.mAvailable) return mAvailable < other.mAvailable;
             return mUnavailable < other.mUnavailable;
+#pragma GCC diagnostic pop
         }
         bool operator > (const Callback& other) const {
             return (*this != other && !(*this < other));
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index dab2fef..05124c0 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -182,7 +182,7 @@
         int64_t format = entry.data.i64[i + STREAM_FORMAT_OFFSET];
         int64_t width = entry.data.i64[i + STREAM_WIDTH_OFFSET];
         int64_t height = entry.data.i64[i + STREAM_HEIGHT_OFFSET];
-        int64_t duration = entry.data.i32[i + STREAM_DURATION_OFFSET];
+        int64_t duration = entry.data.i64[i + STREAM_DURATION_OFFSET];
 
         // Leave the unfiltered format in so apps depending on previous wrong
         // filter behavior continue to work
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 8359bb1..4663529 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -136,6 +136,8 @@
             return !(*this == other);
         }
         bool operator < (const Callback& other) const {
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
             if (*this == other) return false;
             if (mContext != other.mContext) return mContext < other.mContext;
             if (mAvailable != other.mAvailable) return mAvailable < other.mAvailable;
@@ -146,6 +148,7 @@
             if (mPhysicalCamUnavailable != other.mPhysicalCamUnavailable)
                     return mPhysicalCamUnavailable < other.mPhysicalCamUnavailable;
             return mUnavailable < other.mUnavailable;
+#pragma GCC diagnostic pop
         }
         bool operator > (const Callback& other) const {
             return (*this != other && !(*this < other));
diff --git a/cmds/OWNERS b/cmds/OWNERS
index 0d32aac..a48c37a 100644
--- a/cmds/OWNERS
+++ b/cmds/OWNERS
@@ -1,3 +1,3 @@
 elaurent@google.com
+essick@google.com
 lajos@google.com
-marcone@google.com
diff --git a/cmds/stagefright/Android.bp b/cmds/stagefright/Android.bp
new file mode 100644
index 0000000..c4783d3
--- /dev/null
+++ b/cmds/stagefright/Android.bp
@@ -0,0 +1,278 @@
+package {
+    default_applicable_licenses: ["frameworks_av_cmds_stagefright_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "frameworks_av_cmds_stagefright_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "NOTICE",
+    ],
+}
+
+cc_binary {
+    name: "stagefright",
+
+    srcs: [
+        "AudioPlayer.cpp",
+        "stagefright.cpp",
+        "jpeg.cpp",
+        "SineSource.cpp",
+    ],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "libmedia",
+        "libmedia_codeclist",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libjpeg",
+        "libui",
+        "libgui",
+        "libcutils",
+        "liblog",
+        "libhidlbase",
+        "libdatasource",
+        "libaudioclient",
+        "android.hardware.media.omx@1.0",
+        "framework-permission-aidl-cpp",
+    ],
+
+    static_libs: ["framework-permission-aidl-cpp"],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+
+    system_ext_specific: true,
+}
+
+cc_binary {
+    name: "record",
+
+    srcs: [
+        "AudioPlayer.cpp",
+        "SineSource.cpp",
+        "record.cpp",
+    ],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+        "camera_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "libmedia",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libdatasource",
+        "libaudioclient",
+        "framework-permission-aidl-cpp",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "recordvideo",
+
+    srcs: [
+        "AudioPlayer.cpp",
+        "recordvideo.cpp",
+    ],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "libmedia",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libaudioclient",
+        "framework-permission-aidl-cpp",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "audioloop",
+
+    srcs: [
+        "AudioPlayer.cpp",
+        "SineSource.cpp",
+        "audioloop.cpp",
+    ],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "libmedia",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libaudioclient",
+        "framework-permission-aidl-cpp",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "stream",
+
+    srcs: ["stream.cpp"],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libui",
+        "libgui",
+        "libstagefright_foundation",
+        "libmedia",
+        "libcutils",
+        "libdatasource",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "codec",
+
+    srcs: [
+        "codec.cpp",
+        "SimplePlayer.cpp",
+    ],
+
+    header_libs: [
+        "libmediadrm_headers",
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libmedia",
+        "libmedia_omx",
+        "libaudioclient",
+        "libui",
+        "libgui",
+        "libcutils",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
+
+cc_binary {
+    name: "mediafilter",
+
+    srcs: [
+        "filters/argbtorgba.rscript",
+        "filters/nightvision.rscript",
+        "filters/saturation.rscript",
+        "mediafilter.cpp",
+    ],
+
+    header_libs: [
+        "libmediadrm_headers",
+        "libmediametrics_headers",
+        "libstagefright_headers",
+        "rs-headers",
+    ],
+
+    include_dirs: ["frameworks/av/media/libstagefright"],
+
+    shared_libs: [
+        "libstagefright",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libmedia_omx",
+        "libui",
+        "libgui",
+        "libRScpp",
+    ],
+
+    static_libs: ["libstagefright_mediafilter"],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+
+    sanitize: {
+        cfi: true,
+    },
+}
+
+cc_binary {
+    name: "muxer",
+
+    srcs: ["muxer.cpp"],
+
+    header_libs: [
+        "libmediametrics_headers",
+        "libstagefright_headers",
+    ],
+
+    shared_libs: [
+        "libstagefright",
+        "liblog",
+        "libutils",
+        "libbinder",
+        "libstagefright_foundation",
+        "libcutils",
+        "libc",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+    ],
+}
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
deleted file mode 100644
index 803c4a4..0000000
--- a/cmds/stagefright/Android.mk
+++ /dev/null
@@ -1,276 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=       \
-        AudioPlayer.cpp \
-        stagefright.cpp \
-        jpeg.cpp        \
-        SineSource.cpp
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia libmedia_codeclist libutils libbinder \
-        libstagefright_foundation libjpeg libui libgui libcutils liblog \
-        libhidlbase libdatasource libaudioclient \
-        android.hardware.media.omx@1.0 \
-        framework-permission-aidl-cpp
-
-LOCAL_STATIC_LIBRARIES := framework-permission-aidl-cpp
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/av/media/libstagefright/include \
-        frameworks/native/include/media/openmax \
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_SYSTEM_EXT_MODULE:= true
-LOCAL_MODULE:= stagefright
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=         \
-        AudioPlayer.cpp \
-        SineSource.cpp    \
-        record.cpp
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libdatasource libaudioclient \
-        framework-permission-aidl-cpp
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/camera/include \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax \
-        frameworks/native/include/media/hardware
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= record
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=         \
-        AudioPlayer.cpp \
-        recordvideo.cpp
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libaudioclient
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax \
-        frameworks/native/include/media/hardware \
-        framework-permission-aidl-cpp
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= recordvideo
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=         \
-        AudioPlayer.cpp \
-        SineSource.cpp    \
-        audioloop.cpp
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright libmedia liblog libutils libbinder \
-        libstagefright_foundation libaudioclient \
-        framework-permission-aidl-cpp
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= audioloop
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=         \
-        stream.cpp    \
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright liblog libutils libbinder libui libgui \
-        libstagefright_foundation libmedia libcutils libdatasource
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= stream
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=               \
-        codec.cpp               \
-        SimplePlayer.cpp        \
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediadrm_headers \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright liblog libutils libbinder libstagefright_foundation \
-        libmedia libmedia_omx libaudioclient libui libgui libcutils
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= codec
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        filters/argbtorgba.rscript \
-        filters/nightvision.rscript \
-        filters/saturation.rscript \
-        mediafilter.cpp \
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediadrm_headers \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright \
-        liblog \
-        libutils \
-        libbinder \
-        libstagefright_foundation \
-        libmedia_omx \
-        libui \
-        libgui \
-        libRScpp \
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax \
-        frameworks/rs/cpp \
-        frameworks/rs \
-
-intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,)
-LOCAL_C_INCLUDES += $(intermediates)
-
-LOCAL_STATIC_LIBRARIES:= \
-        libstagefright_mediafilter
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= mediafilter
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-LOCAL_SANITIZE := cfi
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:=               \
-        muxer.cpp            \
-
-LOCAL_HEADER_LIBRARIES := \
-        libmediametrics_headers \
-
-LOCAL_SHARED_LIBRARIES := \
-        libstagefright liblog libutils libbinder libstagefright_foundation \
-        libcutils libc
-
-LOCAL_C_INCLUDES:= \
-        frameworks/av/media/libstagefright \
-        frameworks/native/include/media/openmax
-
-LOCAL_CFLAGS += -Wno-multichar -Werror -Wall
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_MODULE:= muxer
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-include $(BUILD_EXECUTABLE)
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index c430f05..6d1263e 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -70,6 +70,10 @@
 
 using namespace android;
 
+namespace {
+    constexpr static int PIXEL_FORMAT_RGBA_1010102_AS_8888 = -HAL_PIXEL_FORMAT_RGBA_1010102;
+}
+
 static long gNumRepetitions;
 static long gMaxNumFrames;  // 0 means decode all available.
 static long gReproduceBug;  // if not -1.
@@ -629,7 +633,14 @@
     fprintf(stderr, "       -m max-number-of-frames-to-decode in each pass\n");
     fprintf(stderr, "       -b bug to reproduce\n");
     fprintf(stderr, "       -i(nfo) dump codec info (profiles and color formats supported, details)\n");
-    fprintf(stderr, "       -t(humbnail) extract video thumbnail or album art\n");
+    fprintf(stderr, "       -t(humbnail) extract video thumbnail or album art (/sdcard/out.jpg)\n");
+    fprintf(stderr, "       -P(ixelFormat) pixel format to use for raw thumbnail "
+                    "(/sdcard/out.raw)\n");
+    fprintf(stderr, "          %d: RGBA_565\n", HAL_PIXEL_FORMAT_RGB_565);
+    fprintf(stderr, "          %d: RGBA_8888\n", HAL_PIXEL_FORMAT_RGBA_8888);
+    fprintf(stderr, "          %d: BGRA_8888\n", HAL_PIXEL_FORMAT_BGRA_8888);
+    fprintf(stderr, "          %d: RGBA_1010102\n", HAL_PIXEL_FORMAT_RGBA_1010102);
+    fprintf(stderr, "          %d: RGBA_1010102 as RGBA_8888\n", PIXEL_FORMAT_RGBA_1010102_AS_8888);
     fprintf(stderr, "       -s(oftware) prefer software codec\n");
     fprintf(stderr, "       -r(hardware) force to use hardware codec\n");
     fprintf(stderr, "       -o playback audio\n");
@@ -787,6 +798,7 @@
     bool useSurfaceTexAlloc = false;
     bool dumpStream = false;
     bool dumpPCMStream = false;
+    int32_t pixelFormat = 0;        // thumbnail pixel format
     String8 dumpStreamFilename;
     gNumRepetitions = 1;
     gMaxNumFrames = 0;
@@ -800,7 +812,7 @@
     sp<android::ALooper> looper;
 
     int res;
-    while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:")) >= 0) {
+    while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:P:")) >= 0) {
         switch (res) {
             case 'a':
             {
@@ -841,6 +853,7 @@
                 break;
             }
 
+            case 'P':
             case 'm':
             case 'n':
             case 'b':
@@ -856,6 +869,8 @@
                     gNumRepetitions = x;
                 } else if (res == 'm') {
                     gMaxNumFrames = x;
+                } else if (res == 'P') {
+                    pixelFormat = x;
                 } else {
                     CHECK_EQ(res, 'b');
                     gReproduceBug = x;
@@ -978,24 +993,71 @@
             close(fd);
             fd = -1;
 
+            uint32_t retrieverPixelFormat = HAL_PIXEL_FORMAT_RGB_565;
+            if (pixelFormat == PIXEL_FORMAT_RGBA_1010102_AS_8888) {
+                retrieverPixelFormat = HAL_PIXEL_FORMAT_RGBA_1010102;
+            } else if (pixelFormat) {
+                retrieverPixelFormat = pixelFormat;
+            }
             sp<IMemory> mem =
                     retriever->getFrameAtTime(-1,
                             MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
-                            HAL_PIXEL_FORMAT_RGB_565,
-                            false /*metaOnly*/);
+                            retrieverPixelFormat, false /*metaOnly*/);
 
             if (mem != NULL) {
                 failed = false;
-                printf("getFrameAtTime(%s) => OK\n", filename);
+                printf("getFrameAtTime(%s) format=%d => OK\n", filename, retrieverPixelFormat);
 
                 VideoFrame *frame = (VideoFrame *)mem->unsecurePointer();
 
-                CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
-                            frame->getFlattenedData(),
-                            frame->mWidth, frame->mHeight), 0);
+                if (pixelFormat) {
+                    int bpp = 0;
+                    switch (pixelFormat) {
+                    case HAL_PIXEL_FORMAT_RGB_565:
+                        bpp = 2;
+                        break;
+                    case PIXEL_FORMAT_RGBA_1010102_AS_8888:
+                        // convert RGBA_1010102 to RGBA_8888
+                        {
+                            uint32_t *data = (uint32_t *)frame->getFlattenedData();
+                            uint32_t *end = data + frame->mWidth * frame->mHeight;
+                            for (; data < end; ++data) {
+                                *data =
+                                    // pick out 8-bit R, G, B values and move them to the
+                                    // correct position
+                                    ( (*data &      0x3fc) >> 2) | // R
+                                    ( (*data &    0xff000) >> 4) | // G
+                                    ( (*data & 0x3fc00000) >> 6) | // B
+                                    // pick out 2-bit A and expand to 8-bits
+                                    (((*data & 0xc0000000) >> 6) * 0x55);
+                            }
+                        }
+
+                        FALLTHROUGH_INTENDED;
+
+                    case HAL_PIXEL_FORMAT_RGBA_1010102:
+                    case HAL_PIXEL_FORMAT_RGBA_8888:
+                    case HAL_PIXEL_FORMAT_BGRA_8888:
+                        bpp = 4;
+                        break;
+                    }
+                    if (bpp) {
+                        FILE *out = fopen("/sdcard/out.raw", "wb");
+                        fwrite(frame->getFlattenedData(), bpp * frame->mWidth, frame->mHeight, out);
+                        fclose(out);
+
+                        printf("write out %d x %d x %dbpp\n", frame->mWidth, frame->mHeight, bpp);
+                    } else {
+                        printf("unknown pixel format.\n");
+                    }
+                } else {
+                    CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
+                                frame->getFlattenedData(),
+                                frame->mWidth, frame->mHeight), 0);
+                }
             }
 
-            {
+            if (!pixelFormat) {
                 mem = retriever->extractAlbumArt();
 
                 if (mem != NULL) {
diff --git a/drm/OWNERS b/drm/OWNERS
index e788754..090c021 100644
--- a/drm/OWNERS
+++ b/drm/OWNERS
@@ -1 +1,3 @@
 jtinker@google.com
+kelzhan@google.com
+robertshih@google.com
diff --git a/drm/drmserver/drmserver.rc b/drm/drmserver/drmserver.rc
index eb176c1..0319ff9 100644
--- a/drm/drmserver/drmserver.rc
+++ b/drm/drmserver/drmserver.rc
@@ -3,7 +3,7 @@
     class main
     user drm
     group drm system inet drmrpc readproc
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
 
 on property:drm.service.enabled=true
     start drm
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 0ffe626..71df58c 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -66,7 +66,7 @@
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
     ],
 
     export_shared_lib_headers: [
diff --git a/drm/libmediadrm/fuzzer/Android.bp b/drm/libmediadrm/fuzzer/Android.bp
index 7281066..49bbad4 100644
--- a/drm/libmediadrm/fuzzer/Android.bp
+++ b/drm/libmediadrm/fuzzer/Android.bp
@@ -35,7 +35,7 @@
     static_libs: [
         "libmediadrm",
         "liblog",
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
     ],
     header_libs: [
         "libmedia_headers",
diff --git a/drm/mediadrm/plugins/TEST_MAPPING b/drm/mediadrm/plugins/TEST_MAPPING
index 7bd1568..9919e90 100644
--- a/drm/mediadrm/plugins/TEST_MAPPING
+++ b/drm/mediadrm/plugins/TEST_MAPPING
@@ -1,16 +1,10 @@
 {
   "presubmit": [
     {
-      "name": "CtsMediaTestCases",
+      "name": "CtsMediaDrmFrameworkTestCases",
       "options" : [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
-        },
-        {
-          "include-filter": "android.media.cts.MediaDrmClearkeyTest"
-        },
-        {
-          "include-filter": "android.media.cts.MediaDrmMetricsTest"
         }
       ]
     }
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
index 9afd3d7..ec4517d 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
@@ -11,4 +11,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
index c1abe7f..3b48cf2 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
@@ -10,4 +10,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
index 1e0d431..6e64978 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
@@ -13,4 +13,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
index 8130511..e302e1b 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
@@ -11,4 +11,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
index 46aba88..84a63a1 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
@@ -15,4 +15,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
index 8186933..649599e 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
+++ b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
@@ -13,4 +13,4 @@
     user media
     group media mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/include/OWNERS b/include/OWNERS
index d6bd998..e1d4db7 100644
--- a/include/OWNERS
+++ b/include/OWNERS
@@ -1,6 +1,6 @@
 elaurent@google.com
-gkasten@google.com
 hunga@google.com
 jtinker@google.com
 lajos@google.com
-marcone@google.com
+essick@google.com
+philburk@google.com
diff --git a/media/Android.mk b/media/Android.mk
new file mode 100644
index 0000000..220a358
--- /dev/null
+++ b/media/Android.mk
@@ -0,0 +1,5 @@
+LOCAL_PATH := $(call my-dir)
+
+$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.conf))
+$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.xml))
+$(eval $(call declare-1p-copy-files,frameworks/av/media/libstagefright,))
diff --git a/media/OWNERS b/media/OWNERS
index 4cf4870..4a25b68 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -1,7 +1,6 @@
 # Bug component: 1344
 elaurent@google.com
 essick@google.com
-hkuang@google.com
 hunga@google.com
 jiabin@google.com
 jmtrivi@google.com
@@ -15,6 +14,7 @@
 robertshih@google.com
 taklee@google.com
 wonsik@google.com
+ytai@google.com
 
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 5bc7262..41fe080 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -3,18 +3,18 @@
     "presubmit-large": [
         // runs whenever we change something in this tree
         {
-            "name": "CtsMediaTestCases",
+            "name": "CtsMediaCodecTestCases",
             "options": [
                 {
-                    "include-filter": "android.media.cts.EncodeDecodeTest"
+                    "include-filter": "android.media.codec.cts.EncodeDecodeTest"
                 }
             ]
         },
         {
-            "name": "CtsMediaTestCases",
+            "name": "CtsMediaCodecTestCases",
             "options": [
                 {
-                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
+                    "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
                 }
             ]
         }
@@ -46,18 +46,18 @@
         // runs regularly, independent of changes in this tree.
         // signals if changes elsewhere break media functionality
         {
-            "name": "CtsMediaTestCases",
+            "name": "CtsMediaCodecTestCases",
             "options": [
                 {
-                    "include-filter": "android.media.cts.EncodeDecodeTest"
+                    "include-filter": "android.media.codec.cts.EncodeDecodeTest"
                 }
             ]
         },
         {
-            "name": "CtsMediaTestCases",
+            "name": "CtsMediaCodecTestCases",
             "options": [
                 {
-                    "include-filter": "android.media.cts.DecodeEditEncodeTest"
+                    "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
                 }
             ]
         }
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index be25ffb..e5f9907 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -25,6 +25,7 @@
     ],
 
     shared_libs: [
+        "packagemanager_aidl-cpp",
         "libaaudioservice",
         "libaudioflinger",
         "libaudiopolicyservice",
@@ -41,7 +42,6 @@
         "libpowermanager",
         "libutils",
         "libvibrator",
-
     ],
 
     // TODO check if we still need all of these include directories
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index 8ee1efb..c5ac7f9 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -73,10 +73,8 @@
         IPCThreadState::self()->joinThreadPool();
         for (;;) {
             siginfo_t info;
-            int ret = waitid(P_PID, childPid, &info, WEXITED | WSTOPPED | WCONTINUED);
-            if (ret == EINTR) {
-                continue;
-            }
+            int ret = TEMP_FAILURE_RETRY(waitid(P_PID, childPid, &info,
+                                                WEXITED | WSTOPPED | WCONTINUED));
             if (ret < 0) {
                 break;
             }
diff --git a/media/bufferpool/2.0/tests/Android.bp b/media/bufferpool/2.0/tests/Android.bp
index 803a813..5e26e3a 100644
--- a/media/bufferpool/2.0/tests/Android.bp
+++ b/media/bufferpool/2.0/tests/Android.bp
@@ -80,3 +80,22 @@
     ],
     compile_multilib: "both",
 }
+
+cc_test {
+    name: "BufferpoolUnitTest",
+    test_suites: ["device-tests"],
+    defaults: ["VtsHalTargetTestDefaults"],
+    srcs: [
+        "allocator.cpp",
+        "BufferpoolUnitTest.cpp",
+    ],
+    static_libs: [
+        "android.hardware.media.bufferpool@2.0",
+        "libcutils",
+        "libstagefright_bufferpool@2.0.1",
+    ],
+    shared_libs: [
+        "libfmq",
+    ],
+    compile_multilib: "both",
+}
diff --git a/media/bufferpool/2.0/tests/AndroidTest.xml b/media/bufferpool/2.0/tests/AndroidTest.xml
new file mode 100644
index 0000000..b027ad0
--- /dev/null
+++ b/media/bufferpool/2.0/tests/AndroidTest.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Test module config for bufferpool unit tests">
+    <option name="test-suite-tag" value="BufferpoolUnitTest" />
+
+    <test class="com.android.tradefed.testtype.GTest" >
+        <option name="native-test-device-path" value="/data/local/tmp" />
+        <option name="module-name" value="BufferpoolUnitTest" />
+    </test>
+</configuration>
diff --git a/media/bufferpool/2.0/tests/BufferpoolUnitTest.cpp b/media/bufferpool/2.0/tests/BufferpoolUnitTest.cpp
new file mode 100644
index 0000000..b448405
--- /dev/null
+++ b/media/bufferpool/2.0/tests/BufferpoolUnitTest.cpp
@@ -0,0 +1,541 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "BufferpoolUnitTest"
+#include <utils/Log.h>
+
+#include <binder/ProcessState.h>
+#include <bufferpool/ClientManager.h>
+#include <gtest/gtest.h>
+#include <hidl/LegacySupport.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <unordered_set>
+#include <vector>
+#include "allocator.h"
+
+using android::hardware::configureRpcThreadpool;
+using android::hardware::media::bufferpool::BufferPoolData;
+using android::hardware::media::bufferpool::V2_0::IClientManager;
+using android::hardware::media::bufferpool::V2_0::ResultStatus;
+using android::hardware::media::bufferpool::V2_0::implementation::BufferId;
+using android::hardware::media::bufferpool::V2_0::implementation::ClientManager;
+using android::hardware::media::bufferpool::V2_0::implementation::ConnectionId;
+using android::hardware::media::bufferpool::V2_0::implementation::TransactionId;
+
+using namespace android;
+
+// communication message types between processes.
+enum PipeCommand : int32_t {
+    INIT,
+    TRANSFER,
+    STOP,
+
+    INIT_OK,
+    INIT_ERROR,
+    TRANSFER_OK,
+    TRANSFER_ERROR,
+    STOP_OK,
+    STOP_ERROR,
+};
+
+// communication message between processes.
+union PipeMessage {
+    struct {
+        int32_t command;
+        int32_t memsetValue;
+        BufferId bufferId;
+        ConnectionId connectionId;
+        TransactionId transactionId;
+        int64_t timestampUs;
+    } data;
+    char array[0];
+};
+
+static int32_t kNumIterationCount = 10;
+
+class BufferpoolTest {
+  public:
+    BufferpoolTest() : mConnectionValid(false), mManager(nullptr), mAllocator(nullptr) {
+        mConnectionId = -1;
+        mReceiverId = -1;
+    }
+
+    ~BufferpoolTest() {
+        if (mConnectionValid) {
+            mManager->close(mConnectionId);
+        }
+    }
+
+  protected:
+    bool mConnectionValid;
+    ConnectionId mConnectionId;
+    ConnectionId mReceiverId;
+
+    android::sp<ClientManager> mManager;
+    std::shared_ptr<BufferPoolAllocator> mAllocator;
+
+    void setupBufferpoolManager();
+};
+
+void BufferpoolTest::setupBufferpoolManager() {
+    // retrieving per process bufferpool object sp<ClientManager>
+    mManager = ClientManager::getInstance();
+    ASSERT_NE(mManager, nullptr) << "unable to get ClientManager\n";
+
+    mAllocator = std::make_shared<TestBufferPoolAllocator>();
+    ASSERT_NE(mAllocator, nullptr) << "unable to create TestBufferPoolAllocator\n";
+
+    // set-up local bufferpool connection for sender
+    ResultStatus status = mManager->create(mAllocator, &mConnectionId);
+    ASSERT_EQ(status, ResultStatus::OK)
+            << "unable to set-up local bufferpool connection for sender\n";
+    mConnectionValid = true;
+}
+
+class BufferpoolUnitTest : public BufferpoolTest, public ::testing::Test {
+  public:
+    virtual void SetUp() override { setupBufferpoolManager(); }
+
+    virtual void TearDown() override {}
+};
+
+class BufferpoolFunctionalityTest : public BufferpoolTest, public ::testing::Test {
+  public:
+    virtual void SetUp() override {
+        mReceiverPid = -1;
+
+        ASSERT_TRUE(pipe(mCommandPipeFds) == 0) << "pipe connection failed for commandPipe\n";
+        ASSERT_TRUE(pipe(mResultPipeFds) == 0) << "pipe connection failed for resultPipe\n";
+
+        mReceiverPid = fork();
+        ASSERT_TRUE(mReceiverPid >= 0) << "fork failed\n";
+
+        if (mReceiverPid == 0) {
+            doReceiver();
+            // In order to ignore gtest behaviour, wait for being killed from tearDown
+            pause();
+        }
+        setupBufferpoolManager();
+    }
+
+    virtual void TearDown() override {
+        if (mReceiverPid > 0) {
+            kill(mReceiverPid, SIGKILL);
+            int wstatus;
+            wait(&wstatus);
+        }
+    }
+
+  protected:
+    pid_t mReceiverPid;
+    int mCommandPipeFds[2];
+    int mResultPipeFds[2];
+
+    bool sendMessage(int* pipes, const PipeMessage& message) {
+        int ret = write(pipes[1], message.array, sizeof(PipeMessage));
+        return ret == sizeof(PipeMessage);
+    }
+
+    bool receiveMessage(int* pipes, PipeMessage* message) {
+        int ret = read(pipes[0], message->array, sizeof(PipeMessage));
+        return ret == sizeof(PipeMessage);
+    }
+
+    void doReceiver();
+};
+
+void BufferpoolFunctionalityTest::doReceiver() {
+    // Configures the threadpool used for handling incoming RPC calls in this process.
+    configureRpcThreadpool(1 /*threads*/, false /*willJoin*/);
+    bool receiverRunning = true;
+    while (receiverRunning) {
+        PipeMessage message;
+        receiveMessage(mCommandPipeFds, &message);
+        ResultStatus err = ResultStatus::OK;
+        switch (message.data.command) {
+            case PipeCommand::INIT: {
+                // receiver manager creation
+                mManager = ClientManager::getInstance();
+                if (!mManager) {
+                    message.data.command = PipeCommand::INIT_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+
+                android::status_t status = mManager->registerAsService();
+                if (status != android::OK) {
+                    message.data.command = PipeCommand::INIT_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+                message.data.command = PipeCommand::INIT_OK;
+                sendMessage(mResultPipeFds, message);
+                break;
+            }
+            case PipeCommand::TRANSFER: {
+                native_handle_t* receiveHandle = nullptr;
+                std::shared_ptr<BufferPoolData> receiveBuffer;
+                err = mManager->receive(message.data.connectionId, message.data.transactionId,
+                                        message.data.bufferId, message.data.timestampUs,
+                                        &receiveHandle, &receiveBuffer);
+                if (err != ResultStatus::OK) {
+                    message.data.command = PipeCommand::TRANSFER_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+                if (!TestBufferPoolAllocator::Verify(receiveHandle, message.data.memsetValue)) {
+                    message.data.command = PipeCommand::TRANSFER_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+                if (receiveHandle) {
+                    native_handle_close(receiveHandle);
+                    native_handle_delete(receiveHandle);
+                }
+                receiveHandle = nullptr;
+                receiveBuffer.reset();
+                message.data.command = PipeCommand::TRANSFER_OK;
+                sendMessage(mResultPipeFds, message);
+                break;
+            }
+            case PipeCommand::STOP: {
+                err = mManager->close(message.data.connectionId);
+                if (err != ResultStatus::OK) {
+                    message.data.command = PipeCommand::STOP_ERROR;
+                    sendMessage(mResultPipeFds, message);
+                    return;
+                }
+                message.data.command = PipeCommand::STOP_OK;
+                sendMessage(mResultPipeFds, message);
+                receiverRunning = false;
+                break;
+            }
+            default:
+                ALOGE("unknown command. try again");
+                break;
+        }
+    }
+}
+
+// Buffer allocation test.
+// Check whether each buffer allocation is done successfully with unique buffer id.
+TEST_F(BufferpoolUnitTest, AllocateBuffer) {
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    std::vector<std::shared_ptr<BufferPoolData>> buffers{};
+    std::vector<native_handle_t*> allocHandle{};
+    ResultStatus status;
+    for (int i = 0; i < kNumIterationCount; ++i) {
+        native_handle_t* handle = nullptr;
+        std::shared_ptr<BufferPoolData> buffer{};
+        status = mManager->allocate(mConnectionId, vecParams, &handle, &buffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for " << i << "iteration";
+
+        buffers.push_back(std::move(buffer));
+        if (handle) {
+            allocHandle.push_back(std::move(handle));
+        }
+    }
+
+    for (int i = 0; i < kNumIterationCount; ++i) {
+        for (int j = i + 1; j < kNumIterationCount; ++j) {
+            ASSERT_TRUE(buffers[i]->mId != buffers[j]->mId) << "allocated buffers are not unique";
+        }
+    }
+    // delete the buffer handles
+    for (auto handle : allocHandle) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+    // clear the vectors
+    buffers.clear();
+    allocHandle.clear();
+}
+
+// Buffer recycle test.
+// Check whether de-allocated buffers are recycled.
+TEST_F(BufferpoolUnitTest, RecycleBuffer) {
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    ResultStatus status;
+    std::vector<BufferId> bid{};
+    std::vector<native_handle_t*> allocHandle{};
+    for (int i = 0; i < kNumIterationCount; ++i) {
+        native_handle_t* handle = nullptr;
+        std::shared_ptr<BufferPoolData> buffer;
+        status = mManager->allocate(mConnectionId, vecParams, &handle, &buffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for " << i << "iteration";
+
+        bid.push_back(buffer->mId);
+        if (handle) {
+            allocHandle.push_back(std::move(handle));
+        }
+        buffer.reset();
+    }
+
+    std::unordered_set<BufferId> set(bid.begin(), bid.end());
+    ASSERT_EQ(set.size(), 1) << "buffers are not recycled properly";
+
+    // delete the buffer handles
+    for (auto handle : allocHandle) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+    allocHandle.clear();
+}
+
+// Validate cache evict and invalidate APIs.
+TEST_F(BufferpoolUnitTest, FlushTest) {
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    ResultStatus status = mManager->registerSender(mManager, mConnectionId, &mReceiverId);
+    ASSERT_TRUE(status == ResultStatus::ALREADY_EXISTS && mReceiverId == mConnectionId);
+
+    // testing empty flush
+    status = mManager->flush(mConnectionId);
+    ASSERT_EQ(status, ResultStatus::OK) << "failed to flush connection : " << mConnectionId;
+
+    std::vector<std::shared_ptr<BufferPoolData>> senderBuffer{};
+    std::vector<native_handle_t*> allocHandle{};
+    std::vector<TransactionId> tid{};
+    std::vector<int64_t> timestampUs{};
+
+    std::map<TransactionId, BufferId> bufferMap{};
+
+    for (int i = 0; i < kNumIterationCount; i++) {
+        int64_t postUs;
+        TransactionId transactionId;
+        native_handle_t* handle = nullptr;
+        std::shared_ptr<BufferPoolData> buffer{};
+        status = mManager->allocate(mConnectionId, vecParams, &handle, &buffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for " << i << " iteration";
+
+        ASSERT_TRUE(TestBufferPoolAllocator::Fill(handle, i));
+
+        status = mManager->postSend(mReceiverId, buffer, &transactionId, &postUs);
+        ASSERT_EQ(status, ResultStatus::OK) << "unable to post send transaction on bufferpool";
+
+        timestampUs.push_back(postUs);
+        tid.push_back(transactionId);
+        bufferMap.insert({transactionId, buffer->mId});
+
+        senderBuffer.push_back(std::move(buffer));
+        if (handle) {
+            allocHandle.push_back(std::move(handle));
+        }
+        buffer.reset();
+    }
+
+    status = mManager->flush(mConnectionId);
+    ASSERT_EQ(status, ResultStatus::OK) << "failed to flush connection : " << mConnectionId;
+
+    std::shared_ptr<BufferPoolData> receiverBuffer{};
+    native_handle_t* recvHandle = nullptr;
+    for (int i = 0; i < kNumIterationCount; i++) {
+        status = mManager->receive(mReceiverId, tid[i], senderBuffer[i]->mId, timestampUs[i],
+                                   &recvHandle, &receiverBuffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "receive failed for buffer " << senderBuffer[i]->mId;
+
+        // find the buffer id from transaction id
+        auto findIt = bufferMap.find(tid[i]);
+        ASSERT_NE(findIt, bufferMap.end()) << "inconsistent buffer mapping";
+
+        // buffer id received must be same as the buffer id sent
+        ASSERT_EQ(findIt->second, receiverBuffer->mId) << "invalid buffer received";
+
+        ASSERT_TRUE(TestBufferPoolAllocator::Verify(recvHandle, i))
+                << "Message received not same as that sent";
+
+        bufferMap.erase(findIt);
+        if (recvHandle) {
+            native_handle_close(recvHandle);
+            native_handle_delete(recvHandle);
+        }
+        recvHandle = nullptr;
+        receiverBuffer.reset();
+    }
+
+    ASSERT_EQ(bufferMap.size(), 0) << "not all of the sent buffers were received";
+
+    for (auto handle : allocHandle) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+    allocHandle.clear();
+    senderBuffer.clear();
+    timestampUs.clear();
+}
+
+// Buffer transfer test between processes.
+TEST_F(BufferpoolFunctionalityTest, TransferBuffer) {
+    // initialize the receiver
+    PipeMessage message;
+    message.data.command = PipeCommand::INIT;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::INIT_OK) << "receiver init failed";
+
+    android::sp<IClientManager> receiver = IClientManager::getService();
+    ASSERT_NE(receiver, nullptr) << "getService failed for receiver\n";
+
+    ConnectionId receiverId;
+    ResultStatus status = mManager->registerSender(receiver, mConnectionId, &receiverId);
+    ASSERT_EQ(status, ResultStatus::OK)
+            << "registerSender failed for connection id " << mConnectionId << "\n";
+
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    for (int i = 0; i < kNumIterationCount; ++i) {
+        native_handle_t* handle = nullptr;
+        std::shared_ptr<BufferPoolData> buffer;
+        status = mManager->allocate(mConnectionId, vecParams, &handle, &buffer);
+        ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for " << i << " iteration";
+
+        ASSERT_TRUE(TestBufferPoolAllocator::Fill(handle, i))
+                << "Fill fail for buffer handle " << handle << "\n";
+
+        // send the buffer to the receiver
+        int64_t postUs;
+        TransactionId transactionId;
+        status = mManager->postSend(receiverId, buffer, &transactionId, &postUs);
+        ASSERT_EQ(status, ResultStatus::OK)
+                << "postSend failed for receiver " << receiverId << "\n";
+
+        // PipeMessage message;
+        message.data.command = PipeCommand::TRANSFER;
+        message.data.memsetValue = i;
+        message.data.bufferId = buffer->mId;
+        message.data.connectionId = receiverId;
+        message.data.transactionId = transactionId;
+        message.data.timestampUs = postUs;
+        sendMessage(mCommandPipeFds, message);
+        // delete buffer handle
+        if (handle) {
+            native_handle_close(handle);
+            native_handle_delete(handle);
+        }
+        ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+        ASSERT_EQ(message.data.command, PipeCommand::TRANSFER_OK)
+                << "received error during buffer transfer\n";
+    }
+    message.data.command = PipeCommand::STOP;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::STOP_OK)
+            << "received error during buffer transfer\n";
+}
+
+/* Validate the bufferpool for the following corner cases:
+ 1. invalid connection id
+ 2. invalid receiver
+ 3. sender not yet registered
+ 4. connection already closed
+*/
+// TODO: Enable when the issue in b/212196495 is fixed
+TEST_F(BufferpoolFunctionalityTest, DISABLED_ValidityTest) {
+    std::vector<uint8_t> vecParams;
+    getTestAllocatorParams(&vecParams);
+
+    std::shared_ptr<BufferPoolData> senderBuffer;
+    native_handle_t* allocHandle = nullptr;
+
+    // call allocate() on a random connection id
+    ConnectionId randomId = rand();
+    ResultStatus status = mManager->allocate(randomId, vecParams, &allocHandle, &senderBuffer);
+    EXPECT_TRUE(status == ResultStatus::NOT_FOUND);
+
+    // initialize the receiver
+    PipeMessage message;
+    message.data.command = PipeCommand::INIT;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::INIT_OK) << "receiver init failed";
+
+    allocHandle = nullptr;
+    senderBuffer.reset();
+    status = mManager->allocate(mConnectionId, vecParams, &allocHandle, &senderBuffer);
+
+    ASSERT_TRUE(TestBufferPoolAllocator::Fill(allocHandle, 0x77));
+
+    // send buffers w/o registering sender
+    int64_t postUs;
+    TransactionId transactionId;
+
+    // random receiver
+    status = mManager->postSend(randomId, senderBuffer, &transactionId, &postUs);
+    ASSERT_NE(status, ResultStatus::OK) << "bufferpool shouldn't allow send on random receiver";
+
+    // establish connection
+    android::sp<IClientManager> receiver = IClientManager::getService();
+    ASSERT_NE(receiver, nullptr) << "getService failed for receiver\n";
+
+    ConnectionId receiverId;
+    status = mManager->registerSender(receiver, mConnectionId, &receiverId);
+    ASSERT_EQ(status, ResultStatus::OK)
+            << "registerSender failed for connection id " << mConnectionId << "\n";
+
+    allocHandle = nullptr;
+    senderBuffer.reset();
+    status = mManager->allocate(mConnectionId, vecParams, &allocHandle, &senderBuffer);
+    ASSERT_EQ(status, ResultStatus::OK) << "allocate failed for connection " << mConnectionId;
+
+    ASSERT_TRUE(TestBufferPoolAllocator::Fill(allocHandle, 0x88));
+
+    // send the buffer to the receiver
+    status = mManager->postSend(receiverId, senderBuffer, &transactionId, &postUs);
+    ASSERT_EQ(status, ResultStatus::OK) << "postSend failed for receiver " << receiverId << "\n";
+
+    // PipeMessage message;
+    message.data.command = PipeCommand::TRANSFER;
+    message.data.memsetValue = 0x88;
+    message.data.bufferId = senderBuffer->mId;
+    message.data.connectionId = receiverId;
+    message.data.transactionId = transactionId;
+    message.data.timestampUs = postUs;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::TRANSFER_OK)
+            << "received error during buffer transfer\n";
+
+    if (allocHandle) {
+        native_handle_close(allocHandle);
+        native_handle_delete(allocHandle);
+    }
+
+    message.data.command = PipeCommand::STOP;
+    sendMessage(mCommandPipeFds, message);
+    ASSERT_TRUE(receiveMessage(mResultPipeFds, &message)) << "receiveMessage failed\n";
+    ASSERT_EQ(message.data.command, PipeCommand::STOP_OK)
+            << "received error during buffer transfer\n";
+
+    // try to send msg to closed connection
+    status = mManager->postSend(receiverId, senderBuffer, &transactionId, &postUs);
+    ASSERT_NE(status, ResultStatus::OK) << "bufferpool shouldn't allow send on closed connection";
+}
+
+int main(int argc, char** argv) {
+    android::hardware::details::setTrebleTestingOverride(true);
+    ::testing::InitGoogleTest(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGV("Test result = %d\n", status);
+    return status;
+}
diff --git a/media/bufferpool/2.0/tests/README.md b/media/bufferpool/2.0/tests/README.md
new file mode 100644
index 0000000..5efd966
--- /dev/null
+++ b/media/bufferpool/2.0/tests/README.md
@@ -0,0 +1,33 @@
+## Media Testing ##
+---
+#### Bufferpool :
+The Bufferpool Test Suite validates the bufferpool library in Android.
+
+Run the following steps to build the test suite:
+```
+m BufferpoolUnitTest
+```
+
+The 32-bit binaries will be created in the following path: ${OUT}/data/nativetest/
+
+The 64-bit binaries will be created in the following path: ${OUT}/data/nativetest64/
+
+To test the 64-bit binary, push it from nativetest64.
+```
+adb push ${OUT}/data/nativetest64/BufferpoolUnitTest/BufferpoolUnitTest /data/local/tmp/
+```
+
+To test the 32-bit binary, push it from nativetest.
+```
+adb push ${OUT}/data/nativetest/BufferpoolUnitTest/BufferpoolUnitTest /data/local/tmp/
+```
+
+usage: BufferpoolUnitTest
+```
+adb shell /data/local/tmp/BufferpoolUnitTest
+```
+Alternatively, the test can be run using the atest command.
+
+```
+atest BufferpoolUnitTest
+```
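+
+Since the binary is a standard GoogleTest suite (it is driven by RUN_ALL_TESTS), individual
+tests can be selected with the usual gtest flags, for example:
+
+```
+adb shell /data/local/tmp/BufferpoolUnitTest --gtest_filter="BufferpoolUnitTest.RecycleBuffer"
+```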
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 6ac4210..90bb054 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -8,7 +8,18 @@
   ],
   "presubmit-large": [
     {
-      "name": "CtsMediaTestCases",
+      "name": "CtsMediaMiscTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaAudioTestCases",
       "options": [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
@@ -18,10 +29,54 @@
         },
         // TODO: b/149314419
         {
-          "exclude-filter": "android.media.cts.AudioPlaybackCaptureTest"
+          "exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
         },
         {
-          "exclude-filter": "android.media.cts.AudioRecordTest"
+          "exclude-filter": "android.media.audio.cts.AudioRecordTest"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaDecoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaEncoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaCodecTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaPlayerTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
         }
       ]
     }
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
index bb63e1f..7afea91 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp
@@ -225,7 +225,7 @@
         work->result = C2_CORRUPTED;
         return;
     }
-    uint64_t outTimeStamp =
+    int64_t outTimeStamp =
         mProcessedSamples * 1000000ll / mIntf->getSampleRate();
     size_t inPos = 0;
     size_t outPos = 0;
@@ -266,7 +266,7 @@
     ALOGV("causal sample size %d", mFilledLen);
     if (mIsFirst && outPos != 0) {
         mIsFirst = false;
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
     }
     fillEmptyWork(work);
     if (outPos != 0) {
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
index 6ab14db..4920b23 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h
@@ -54,7 +54,7 @@
     bool mIsFirst;
     bool mSignalledError;
     bool mSignalledOutputEos;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     int32_t mFilledLen;
     int16_t mInputFrame[kNumSamplesPerFrame];
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
index 84728ae..29b1040 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp
@@ -307,7 +307,7 @@
         work->result = wView.error();
         return;
     }
-    uint64_t outTimeStamp =
+    int64_t outTimeStamp =
         mProcessedSamples * 1000000ll / mIntf->getSampleRate();
     size_t inPos = 0;
     size_t outPos = 0;
@@ -341,7 +341,7 @@
     ALOGV("causal sample size %d", mFilledLen);
     if (mIsFirst && outPos != 0) {
         mIsFirst = false;
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
     }
     fillEmptyWork(work);
     if (outPos != 0) {
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
index 0cc9e9f..72990c3 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h
@@ -55,7 +55,7 @@
     bool mIsFirst;
     bool mSignalledError;
     bool mSignalledOutputEos;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     int32_t mFilledLen;
     int16_t mInputFrame[kNumSamplesPerFrame];
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index c08cd59..39bbe1c 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -505,124 +505,6 @@
     }
 }
 
-static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride,
-        uint32_t width, uint32_t height) {
-
-    for (size_t i = 0; i < height; ++i) {
-        memcpy(dstY, srcY, width);
-        srcY += srcYStride;
-        dstY += dstYStride;
-    }
-
-    for (size_t i = 0; i < height / 2; ++i) {
-        memcpy(dstV, srcV, width / 2);
-        srcV += srcVStride;
-        dstV += dstUVStride;
-    }
-
-    for (size_t i = 0; i < height / 2; ++i) {
-        memcpy(dstU, srcU, width / 2);
-        srcU += srcUStride;
-        dstU += dstUVStride;
-    }
-}
-
-static void convertYUV420Planar16ToY410(uint32_t *dst,
-        const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstStride, size_t width, size_t height) {
-
-    // Converting two lines at a time, slightly faster
-    for (size_t y = 0; y < height; y += 2) {
-        uint32_t *dstTop = (uint32_t *) dst;
-        uint32_t *dstBot = (uint32_t *) (dst + dstStride);
-        uint16_t *ySrcTop = (uint16_t*) srcY;
-        uint16_t *ySrcBot = (uint16_t*) (srcY + srcYStride);
-        uint16_t *uSrc = (uint16_t*) srcU;
-        uint16_t *vSrc = (uint16_t*) srcV;
-
-        uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
-        size_t x = 0;
-        for (; x < width - 3; x += 4) {
-
-            u01 = *((uint32_t*)uSrc); uSrc += 2;
-            v01 = *((uint32_t*)vSrc); vSrc += 2;
-
-            y01 = *((uint32_t*)ySrcTop); ySrcTop += 2;
-            y23 = *((uint32_t*)ySrcTop); ySrcTop += 2;
-            y45 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-            y67 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-
-            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-            uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
-
-            *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
-            *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
-            *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
-            *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
-
-            *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
-            *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
-            *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
-            *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
-        }
-
-        // There should be at most 2 more pixels to process. Note that we don't
-        // need to consider odd case as the buffer is always aligned to even.
-        if (x < width) {
-            u01 = *uSrc;
-            v01 = *vSrc;
-            y01 = *((uint32_t*)ySrcTop);
-            y45 = *((uint32_t*)ySrcBot);
-            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-            *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
-            *dstTop++ = ((y01 >> 16) << 10) | uv0;
-            *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
-            *dstBot++ = ((y45 >> 16) << 10) | uv0;
-        }
-
-        srcY += srcYStride * 2;
-        srcU += srcUStride;
-        srcV += srcVStride;
-        dst += dstStride * 2;
-    }
-
-    return;
-}
-
-static void convertYUV420Planar16ToYUV420Planar(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-        const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride,
-        size_t width, size_t height) {
-
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; ++x) {
-            dstY[x] = (uint8_t)(srcY[x] >> 2);
-        }
-
-        srcY += srcYStride;
-        dstY += dstYStride;
-    }
-
-    for (size_t y = 0; y < (height + 1) / 2; ++y) {
-        for (size_t x = 0; x < (width + 1) / 2; ++x) {
-            dstU[x] = (uint8_t)(srcU[x] >> 2);
-            dstV[x] = (uint8_t)(srcV[x] >> 2);
-        }
-
-        srcU += srcUStride;
-        srcV += srcVStride;
-        dstU += dstUVStride;
-        dstV += dstUVStride;
-    }
-    return;
-}
 bool C2SoftAomDec::outputBuffer(
         const std::shared_ptr<C2BlockPool> &pool,
         const std::unique_ptr<C2Work> &work)
@@ -706,26 +588,20 @@
         const uint16_t *srcV = (const uint16_t *)img->planes[AOM_PLANE_V];
 
         if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-            convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
-                                    srcUStride / 2, srcVStride / 2,
-                                    dstYStride / sizeof(uint32_t),
-                                    mWidth, mHeight);
+            convertYUV420Planar16ToY410OrRGBA1010102(
+                    (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+                    srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
         } else {
-            convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
-                                    srcY, srcU, srcV,
-                                    srcYStride / 2, srcUStride / 2, srcVStride / 2,
-                                    dstYStride, dstUVStride,
-                                    mWidth, mHeight);
+            convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
+                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+                                        mWidth, mHeight);
         }
     } else {
         const uint8_t *srcY = (const uint8_t *)img->planes[AOM_PLANE_Y];
         const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
-        copyOutputBufferToYuvPlanarFrame(
-                dstY, dstU, dstV, srcY, srcU, srcV,
-                srcYStride, srcUStride, srcVStride,
-                dstYStride, dstUVStride,
-                mWidth, mHeight);
+        convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                   srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
     }
     finishWork(*(int64_t*)img->user_priv, work, std::move(block));
     block = nullptr;
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index e8287f9..cc4517d 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -511,7 +511,7 @@
 status_t C2SoftAvcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN32(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -777,20 +777,20 @@
         return C2_CORRUPTED;
     }
     if (mOutBlock &&
-            (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+            (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
         mOutBlock.reset();
     }
     if (!mOutBlock) {
         uint32_t format = HAL_PIXEL_FORMAT_YV12;
         C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
         c2_status_t err =
-            pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+            pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
         if (err != C2_OK) {
             ALOGE("fetchGraphicBlock for Output failed with status %d", err);
             return err;
         }
         ALOGV("provided (%dx%d) required (%dx%d)",
-              mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+              mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
     }
 
     return C2_OK;
@@ -928,7 +928,7 @@
         if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                mStride = ALIGN32(ps_decode_op->u4_pic_wd);
+                mStride = ALIGN128(ps_decode_op->u4_pic_wd);
                 setParams(mStride, IVD_DECODE_FRAME);
             }
             if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 5c07d29..59d5184 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -38,7 +38,7 @@
 #define ivdext_ctl_set_num_cores_op_t   ih264d_ctl_set_num_cores_op_t
 #define ivdext_ctl_get_vui_params_ip_t  ih264d_ctl_get_vui_params_ip_t
 #define ivdext_ctl_get_vui_params_op_t  ih264d_ctl_get_vui_params_op_t
-#define ALIGN32(x)                      ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x)                     ((((x) + 127) >> 7) << 7)
 #define MAX_NUM_CORES                   4
 #define IVDEXT_CMD_CTL_SET_NUM_CORES    \
         (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 6c4b7d9..1eec8f9 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -29,7 +29,297 @@
 #include <SimpleC2Component.h>
 
 namespace android {
+constexpr uint8_t kNeutralUVBitDepth8 = 128;
+constexpr uint16_t kNeutralUVBitDepth10 = 512;
 
+bool isAtLeastT() {
+    char deviceCodeName[PROP_VALUE_MAX];
+    __system_property_get("ro.build.version.codename", deviceCodeName);
+    return android_get_device_api_level() >= __ANDROID_API_T__ ||
+           !strcmp(deviceCodeName, "Tiramisu");
+}
+
+void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
+                                const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
+                                size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                size_t dstUVStride, uint32_t width, uint32_t height,
+                                bool isMonochrome) {
+    for (size_t i = 0; i < height; ++i) {
+        memcpy(dstY, srcY, width);
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    if (isMonochrome) {
+        // Fill with neutral U/V values.
+        for (size_t i = 0; i < height / 2; ++i) {
+            memset(dstV, kNeutralUVBitDepth8, width / 2);
+            memset(dstU, kNeutralUVBitDepth8, width / 2);
+            dstV += dstUVStride;
+            dstU += dstUVStride;
+        }
+        return;
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        memcpy(dstV, srcV, width / 2);
+        srcV += srcVStride;
+        dstV += dstUVStride;
+    }
+
+    for (size_t i = 0; i < height / 2; ++i) {
+        memcpy(dstU, srcU, width / 2);
+        srcU += srcUStride;
+        dstU += dstUVStride;
+    }
+}
+
+void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+                                 const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+                                 size_t srcVStride, size_t dstStride, size_t width, size_t height) {
+    // Converting two lines at a time, slightly faster
+    for (size_t y = 0; y < height; y += 2) {
+        uint32_t *dstTop = (uint32_t *)dst;
+        uint32_t *dstBot = (uint32_t *)(dst + dstStride);
+        uint16_t *ySrcTop = (uint16_t *)srcY;
+        uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
+        uint16_t *uSrc = (uint16_t *)srcU;
+        uint16_t *vSrc = (uint16_t *)srcV;
+
+        uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
+        size_t x = 0;
+        for (; x < width - 3; x += 4) {
+            u01 = *((uint32_t *)uSrc);
+            uSrc += 2;
+            v01 = *((uint32_t *)vSrc);
+            vSrc += 2;
+
+            y01 = *((uint32_t *)ySrcTop);
+            ySrcTop += 2;
+            y23 = *((uint32_t *)ySrcTop);
+            ySrcTop += 2;
+            y45 = *((uint32_t *)ySrcBot);
+            ySrcBot += 2;
+            y67 = *((uint32_t *)ySrcBot);
+            ySrcBot += 2;
+
+            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+            uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
+
+            *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
+            *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
+            *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
+            *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
+
+            *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
+            *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
+            *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
+            *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
+        }
+
+        // There should be at most 2 more pixels to process. Note that we don't
+        // need to consider odd case as the buffer is always aligned to even.
+        if (x < width) {
+            u01 = *uSrc;
+            v01 = *vSrc;
+            y01 = *((uint32_t *)ySrcTop);
+            y45 = *((uint32_t *)ySrcBot);
+            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+            *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
+            *dstTop++ = ((y01 >> 16) << 10) | uv0;
+            *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
+            *dstBot++ = ((y45 >> 16) << 10) | uv0;
+        }
+
+        srcY += srcYStride * 2;
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dst += dstStride * 2;
+    }
+}
+#define CLIP3(min, v, max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
+void convertYUV420Planar16ToRGBA1010102(uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+                                        const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+                                        size_t srcVStride, size_t dstStride, size_t width,
+                                        size_t height) {
+    // Converting two lines at a time, slightly faster
+    for (size_t y = 0; y < height; y += 2) {
+        uint32_t *dstTop = (uint32_t *)dst;
+        uint32_t *dstBot = (uint32_t *)(dst + dstStride);
+        uint16_t *ySrcTop = (uint16_t *)srcY;
+        uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
+        uint16_t *uSrc = (uint16_t *)srcU;
+        uint16_t *vSrc = (uint16_t *)srcV;
+
+        // BT.2020 Limited Range conversion
+
+        // B = 1.168  *(Y - 64) + 2.148  *(U - 512)
+        // G = 1.168  *(Y - 64) - 0.652  *(V - 512) - 0.188  *(U - 512)
+        // R = 1.168  *(Y - 64) + 1.683  *(V - 512)
+
+        // B = 1196/1024  *(Y - 64) + 2200/1024  *(U - 512)
+        // G = .................... -  668/1024  *(V - 512) - 192/1024  *(U - 512)
+        // R = .................... + 1723/1024  *(V - 512)
+
+        // min_B = (1196  *(- 64) + 2200  *(- 512)) / 1024 = -1175
+        // min_G = (1196  *(- 64) - 668  *(1023 - 512) - 192  *(1023 - 512)) / 1024 = -504
+        // min_R = (1196  *(- 64) + 1723  *(- 512)) / 1024 = -937
+
+        // max_B = (1196  *(1023 - 64) + 2200  *(1023 - 512)) / 1024 = 2218
+        // max_G = (1196  *(1023 - 64) - 668  *(- 512) - 192  *(- 512)) / 1024 = 1551
+        // max_R = (1196  *(1023 - 64) + 1723  *(1023 - 512)) / 1024 = 1980
+
+        int32_t mY = 1196, mU_B = 2200, mV_G = -668, mV_R = 1723, mU_G = -192;
+        for (size_t x = 0; x < width; x += 2) {
+            int32_t u, v, y00, y01, y10, y11;
+            u = *uSrc - 512;
+            uSrc += 1;
+            v = *vSrc - 512;
+            vSrc += 1;
+
+            y00 = *ySrcTop - 64;
+            ySrcTop += 1;
+            y01 = *ySrcTop - 64;
+            ySrcTop += 1;
+            y10 = *ySrcBot - 64;
+            ySrcBot += 1;
+            y11 = *ySrcBot - 64;
+            ySrcBot += 1;
+
+            int32_t u_b = u * mU_B;
+            int32_t u_g = u * mU_G;
+            int32_t v_g = v * mV_G;
+            int32_t v_r = v * mV_R;
+
+            int32_t yMult, b, g, r;
+            yMult = y00 * mY;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+            yMult = y01 * mY;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+            yMult = y10 * mY;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstBot++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+            yMult = y11 * mY;
+            b = (yMult + u_b) / 1024;
+            g = (yMult + v_g + u_g) / 1024;
+            r = (yMult + v_r) / 1024;
+            b = CLIP3(0, b, 1023);
+            g = CLIP3(0, g, 1023);
+            r = CLIP3(0, r, 1023);
+            *dstBot++ = 3 << 30 | (b << 20) | (g << 10) | r;
+        }
+
+        srcY += srcYStride * 2;
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dst += dstStride * 2;
+    }
+}
+
+void convertYUV420Planar16ToY410OrRGBA1010102(uint32_t *dst, const uint16_t *srcY,
+                                              const uint16_t *srcU, const uint16_t *srcV,
+                                              size_t srcYStride, size_t srcUStride,
+                                              size_t srcVStride, size_t dstStride, size_t width,
+                                              size_t height) {
+    if (isAtLeastT()) {
+        convertYUV420Planar16ToRGBA1010102(dst, srcY, srcU, srcV, srcYStride, srcUStride,
+                                           srcVStride, dstStride, width, height);
+    } else {
+        convertYUV420Planar16ToY410(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                                    dstStride, width, height);
+    }
+}
+void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
+                                 const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                 size_t dstUVStride, size_t width, size_t height,
+                                 bool isMonochrome) {
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = (uint8_t)(srcY[x] >> 2);
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    if (isMonochrome) {
+        // Fill with neutral U/V values.
+        for (size_t y = 0; y < (height + 1) / 2; ++y) {
+            memset(dstV, kNeutralUVBitDepth8, (width + 1) / 2);
+            memset(dstU, kNeutralUVBitDepth8, (width + 1) / 2);
+            dstV += dstUVStride;
+            dstU += dstUVStride;
+        }
+        return;
+    }
+
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+        for (size_t x = 0; x < (width + 1) / 2; ++x) {
+            dstU[x] = (uint8_t)(srcU[x] >> 2);
+            dstV[x] = (uint8_t)(srcV[x] >> 2);
+        }
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dstU += dstUVStride;
+        dstV += dstUVStride;
+    }
+}
+
+void convertYUV420Planar16ToP010(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+                                 const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                 size_t dstUVStride, size_t width, size_t height,
+                                 bool isMonochrome) {
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            dstY[x] = srcY[x] << 6;
+        }
+        srcY += srcYStride;
+        dstY += dstYStride;
+    }
+
+    if (isMonochrome) {
+        // Fill with neutral U/V values.
+        for (size_t y = 0; y < (height + 1) / 2; ++y) {
+            for (size_t x = 0; x < (width + 1) / 2; ++x) {
+                dstUV[2 * x] = kNeutralUVBitDepth10 << 6;
+                dstUV[2 * x + 1] = kNeutralUVBitDepth10 << 6;
+            }
+            dstUV += dstUVStride;
+        }
+        return;
+    }
+
+    for (size_t y = 0; y < (height + 1) / 2; ++y) {
+        for (size_t x = 0; x < (width + 1) / 2; ++x) {
+            dstUV[2 * x] = srcU[x] << 6;
+            dstUV[2 * x + 1] = srcV[x] << 6;
+        }
+        srcU += srcUStride;
+        srcV += srcVStride;
+        dstUV += dstUVStride;
+    }
+}
 std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
     std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
     mQueue.pop_front();
@@ -591,6 +881,48 @@
     return hasQueuedWork;
 }
 
+int SimpleC2Component::getHalPixelFormatForBitDepth10(bool allowRGBA1010102) {
+    // Save supported hal pixel formats for bit depth of 10, the first time this is called
+    if (!mBitDepth10HalPixelFormats.size()) {
+        std::vector<int> halPixelFormats;
+        if (isAtLeastT()) {
+            halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        // Since allowRGBA1010102 can change on each call, but mBitDepth10HalPixelFormats
+        // is populated only once, allowRGBA1010102 is not considered at this stage.
+        halPixelFormats.push_back(HAL_PIXEL_FORMAT_RGBA_1010102);
+
+        for (int halPixelFormat : halPixelFormats) {
+            std::shared_ptr<C2GraphicBlock> block;
+
+            uint32_t gpuConsumerFlags = halPixelFormat == HAL_PIXEL_FORMAT_RGBA_1010102
+                                                ? C2AndroidMemoryUsage::HW_TEXTURE_READ
+                                                : 0;
+            C2MemoryUsage usage = {C2MemoryUsage::CPU_READ | gpuConsumerFlags,
+                                   C2MemoryUsage::CPU_WRITE};
+            // TODO(b/214411172) Use AHardwareBuffer_isSupported once it supports P010
+            c2_status_t status =
+                    mOutputBlockPool->fetchGraphicBlock(320, 240, halPixelFormat, usage, &block);
+            if (status == C2_OK) {
+                mBitDepth10HalPixelFormats.push_back(halPixelFormat);
+            }
+        }
+        // Add YV12 at the end as a fallback option
+        mBitDepth10HalPixelFormats.push_back(HAL_PIXEL_FORMAT_YV12);
+    }
+    // From Android T onwards, HAL_PIXEL_FORMAT_RGBA_1010102 corresponds to the true
+    // RGBA 1010102 format, unlike earlier versions where it was used to represent
+    // YUVA 1010102 data.
+    if (!isAtLeastT()) {
+        // When RGBA1010102 is not allowed and the first supported hal pixel format is
+        // HAL_PIXEL_FORMAT_RGBA_1010102, return HAL_PIXEL_FORMAT_YV12.
+        if (!allowRGBA1010102 && mBitDepth10HalPixelFormats[0] == HAL_PIXEL_FORMAT_RGBA_1010102) {
+            return HAL_PIXEL_FORMAT_YV12;
+        }
+    }
+    // Return the first entry from supported formats
+    return mBitDepth10HalPixelFormats[0];
+}
 std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
         const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
     return C2Buffer::CreateLinearBuffer(block->share(offset, size, ::C2Fence()));
diff --git a/media/codec2/components/base/SimpleC2Interface.cpp b/media/codec2/components/base/SimpleC2Interface.cpp
index 29740d1..993e602 100644
--- a/media/codec2/components/base/SimpleC2Interface.cpp
+++ b/media/codec2/components/base/SimpleC2Interface.cpp
@@ -28,6 +28,14 @@
 
 /* SimpleInterface */
 
+static C2R SubscribedParamIndicesSetter(
+        bool mayBlock, C2InterfaceHelper::C2P<C2SubscribedParamIndicesTuning> &me) {
+    (void)mayBlock;
+    (void)me;
+
+    return C2R::Ok();
+}
+
 SimpleInterface<void>::BaseParams::BaseParams(
         const std::shared_ptr<C2ReflectorHelper> &reflector,
         C2String name,
@@ -186,7 +194,7 @@
             .withDefault(C2SubscribedParamIndicesTuning::AllocShared(0u))
             .withFields({ C2F(mSubscribedParamIndices, m.values[0]).any(),
                           C2F(mSubscribedParamIndices, m.values).any() })
-            .withSetter(Setter<C2SubscribedParamIndicesTuning>::NonStrictValuesWithNoDeps)
+            .withSetter(SubscribedParamIndicesSetter)
             .build());
 
     /* TODO
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index e5e16d8..3172f29 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -27,7 +27,27 @@
 #include <media/stagefright/foundation/Mutexed.h>
 
 namespace android {
-
+bool isAtLeastT();
+void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
+                                const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
+                                size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                size_t dstUVStride, uint32_t width, uint32_t height,
+                                bool isMonochrome = false);
+void convertYUV420Planar16ToY410OrRGBA1010102(uint32_t *dst, const uint16_t *srcY,
+                                              const uint16_t *srcU, const uint16_t *srcV,
+                                              size_t srcYStride, size_t srcUStride,
+                                              size_t srcVStride, size_t dstStride, size_t width,
+                                              size_t height);
+void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
+                                 const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                 size_t dstUVStride, size_t width, size_t height,
+                                 bool isMonochrome = false);
+void convertYUV420Planar16ToP010(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
+                                 const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
+                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
+                                 size_t dstUVStride, size_t width, size_t height,
+                                 bool isMonochrome = false);
 class SimpleC2Component
         : public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
 public:
@@ -149,6 +169,7 @@
     static constexpr uint32_t NO_DRAIN = ~0u;
 
     C2ReadView mDummyReadView;
+    int getHalPixelFormatForBitDepth10(bool allowRGBA1010102);
 
 private:
     const std::shared_ptr<C2ComponentInterface> mIntf;
@@ -232,6 +253,7 @@
     class BlockingBlockPool;
     std::shared_ptr<BlockingBlockPool> mOutputBlockPool;
 
+    std::vector<int> mBitDepth10HalPixelFormats;
     SimpleC2Component() = delete;
 };
 
diff --git a/media/codec2/components/base/include/SimpleC2Interface.h b/media/codec2/components/base/include/SimpleC2Interface.h
index 2051d3d0..916f392 100644
--- a/media/codec2/components/base/include/SimpleC2Interface.h
+++ b/media/codec2/components/base/include/SimpleC2Interface.h
@@ -209,6 +209,7 @@
         return me.F(me.v.value).validatePossible(me.v.value);
     }
 
+    // TODO(b/230146771): fix crash
     static C2R NonStrictValuesWithNoDeps(
             bool mayBlock, C2InterfaceHelper::C2P<type> &me) {
         (void)mayBlock;
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 6fead3a..182edfb 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -439,9 +439,6 @@
     }
     FLAC__bool ok = FLAC__stream_encoder_finish(mFlacStreamEncoder);
     if (!ok) return C2_CORRUPTED;
-    mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
-    mProcessedSamples = 0u;
 
     return C2_OK;
 }
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index f857e87..701c22c 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -20,17 +20,14 @@
 
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2Mapper.h>
 #include <SimpleC2Interface.h>
 #include <log/log.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
 namespace android {
-namespace {
-
-constexpr uint8_t NEUTRAL_UV_VALUE = 128;
-
-}  // namespace
 
 // codecname set and passed in as a compile flag from Android.bp
 constexpr char COMPONENT_NAME[] = CODECNAME;
@@ -156,11 +153,54 @@
             .withSetter(DefaultColorAspectsSetter)
             .build());
 
+      addParameter(
+              DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+              .withDefault(new C2StreamColorAspectsInfo::input(
+                      0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                      C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+              .withFields({
+                  C2F(mCodedColorAspects, range).inRange(
+                              C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                  C2F(mCodedColorAspects, primaries).inRange(
+                              C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                  C2F(mCodedColorAspects, transfer).inRange(
+                              C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                  C2F(mCodedColorAspects, matrix).inRange(
+                              C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+              })
+              .withSetter(CodedColorAspectsSetter)
+              .build());
+
+      addParameter(
+              DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+              .withDefault(new C2StreamColorAspectsInfo::output(
+                      0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                      C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+              .withFields({
+                  C2F(mColorAspects, range).inRange(
+                              C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                  C2F(mColorAspects, primaries).inRange(
+                              C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                  C2F(mColorAspects, transfer).inRange(
+                              C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                  C2F(mColorAspects, matrix).inRange(
+                              C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+              })
+              .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+              .build());
+
+    std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+    if (isAtLeastT()) {
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+    }
     // TODO: support more formats?
-    addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
-                     .withConstValue(new C2StreamPixelFormatInfo::output(
-                         0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
-                     .build());
+    addParameter(
+            DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+            .withDefault(new C2StreamPixelFormatInfo::output(
+                              0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+            .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+            .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+            .build());
   }
 
   static C2R SizeSetter(bool mayBlock,
@@ -218,6 +258,37 @@
     return C2R::Ok();
   }
 
+  static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+    (void)mayBlock;
+    if (me.v.range > C2Color::RANGE_OTHER) {
+      me.set().range = C2Color::RANGE_OTHER;
+    }
+    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+      me.set().primaries = C2Color::PRIMARIES_OTHER;
+    }
+    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+      me.set().transfer = C2Color::TRANSFER_OTHER;
+    }
+    if (me.v.matrix > C2Color::MATRIX_OTHER) {
+      me.set().matrix = C2Color::MATRIX_OTHER;
+    }
+    return C2R::Ok();
+  }
+
+  static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+                                const C2P<C2StreamColorAspectsTuning::output> &def,
+                                const C2P<C2StreamColorAspectsInfo::input> &coded) {
+    (void)mayBlock;
+    // take default values for all unspecified fields, and coded values for specified ones
+    me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+    me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
+        ? def.v.primaries : coded.v.primaries;
+    me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
+        ? def.v.transfer : coded.v.transfer;
+    me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+    return C2R::Ok();
+  }
+
   static C2R ProfileLevelSetter(
       bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
       const C2P<C2StreamPictureSizeInfo::output> &size) {
@@ -232,6 +303,10 @@
     return mDefaultColorAspects;
   }
 
+  std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+      return mColorAspects;
+  }
+
   static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
                                       C2P<C2StreamHdr10PlusInfo::input> &me) {
     (void)mayBlock;
@@ -254,6 +329,8 @@
   std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
   std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
   std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+  std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+  std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
   std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
   std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
 };
@@ -332,6 +409,7 @@
 bool C2SoftGav1Dec::initDecoder() {
   mSignalledError = false;
   mSignalledOutputEos = false;
+  mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
   mCodecCtx.reset(new libgav1::Decoder());
 
   if (mCodecCtx == nullptr) {
@@ -371,6 +449,10 @@
                                const std::shared_ptr<C2GraphicBlock> &block) {
   std::shared_ptr<C2Buffer> buffer =
       createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+  {
+      IntfImpl::Lock lock = mIntf->lock();
+      buffer->setInfo(mIntf->getColorAspects_l());
+  }
   auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
     uint32_t flags = 0;
     if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
@@ -465,148 +547,36 @@
   }
 }
 
-static void copyOutputBufferToYV12Frame(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-                                        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
-                                        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-                                        size_t dstYStride, size_t dstUVStride,
-                                        uint32_t width, uint32_t height,
-                                        bool isMonochrome) {
+void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
+    VuiColorAspects vuiColorAspects;
+    vuiColorAspects.primaries = buffer->color_primary;
+    vuiColorAspects.transfer = buffer->transfer_characteristics;
+    vuiColorAspects.coeffs = buffer->matrix_coefficients;
+    vuiColorAspects.fullRange = buffer->color_range;
 
-  for (size_t i = 0; i < height; ++i) {
-    memcpy(dstY, srcY, width);
-    srcY += srcYStride;
-    dstY += dstYStride;
-  }
-
-  if (isMonochrome) {
-    // Fill with neutral U/V values.
-    for (size_t i = 0; i < height / 2; ++i) {
-      memset(dstV, NEUTRAL_UV_VALUE, width / 2);
-      memset(dstU, NEUTRAL_UV_VALUE, width / 2);
-      dstV += dstUVStride;
-      dstU += dstUVStride;
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = { 0u };
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
     }
-    return;
-  }
-
-  for (size_t i = 0; i < height / 2; ++i) {
-    memcpy(dstV, srcV, width / 2);
-    srcV += srcVStride;
-    dstV += dstUVStride;
-  }
-
-  for (size_t i = 0; i < height / 2; ++i) {
-    memcpy(dstU, srcU, width / 2);
-    srcU += srcUStride;
-    dstU += dstUVStride;
-  }
-}
-
-static void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY,
-                                        const uint16_t *srcU,
-                                        const uint16_t *srcV, size_t srcYStride,
-                                        size_t srcUStride, size_t srcVStride,
-                                        size_t dstStride, size_t width,
-                                        size_t height) {
-  // Converting two lines at a time, slightly faster
-  for (size_t y = 0; y < height; y += 2) {
-    uint32_t *dstTop = (uint32_t *)dst;
-    uint32_t *dstBot = (uint32_t *)(dst + dstStride);
-    uint16_t *ySrcTop = (uint16_t *)srcY;
-    uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
-    uint16_t *uSrc = (uint16_t *)srcU;
-    uint16_t *vSrc = (uint16_t *)srcV;
-
-    uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
-    size_t x = 0;
-    for (; x < width - 3; x += 4) {
-      u01 = *((uint32_t *)uSrc);
-      uSrc += 2;
-      v01 = *((uint32_t *)vSrc);
-      vSrc += 2;
-
-      y01 = *((uint32_t *)ySrcTop);
-      ySrcTop += 2;
-      y23 = *((uint32_t *)ySrcTop);
-      ySrcTop += 2;
-      y45 = *((uint32_t *)ySrcBot);
-      ySrcBot += 2;
-      y67 = *((uint32_t *)ySrcBot);
-      ySrcBot += 2;
-
-      uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-      uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
-
-      *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
-      *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
-      *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
-      *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
-
-      *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
-      *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
-      *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
-      *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
-    }
-
-    // There should be at most 2 more pixels to process. Note that we don't
-    // need to consider odd case as the buffer is always aligned to even.
-    if (x < width) {
-      u01 = *uSrc;
-      v01 = *vSrc;
-      y01 = *((uint32_t *)ySrcTop);
-      y45 = *((uint32_t *)ySrcBot);
-      uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-      *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
-      *dstTop++ = ((y01 >> 16) << 10) | uv0;
-      *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
-      *dstBot++ = ((y45 >> 16) << 10) | uv0;
-    }
-
-    srcY += srcYStride * 2;
-    srcU += srcUStride;
-    srcV += srcVStride;
-    dst += dstStride * 2;
-  }
-}
-
-static void convertYUV420Planar16ToYUV420Planar(
-    uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-    const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-    size_t srcYStride, size_t srcUStride, size_t srcVStride,
-    size_t dstYStride, size_t dstUVStride,
-    size_t width, size_t height, bool isMonochrome) {
-
-  for (size_t y = 0; y < height; ++y) {
-    for (size_t x = 0; x < width; ++x) {
-      dstY[x] = (uint8_t)(srcY[x] >> 2);
-    }
-
-    srcY += srcYStride;
-    dstY += dstYStride;
-  }
-
-  if (isMonochrome) {
-    // Fill with neutral U/V values.
-    for (size_t y = 0; y < (height + 1) / 2; ++y) {
-      memset(dstV, NEUTRAL_UV_VALUE, (width + 1) / 2);
-      memset(dstU, NEUTRAL_UV_VALUE, (width + 1) / 2);
-      dstV += dstUVStride;
-      dstU += dstUVStride;
-    }
-    return;
-  }
-
-  for (size_t y = 0; y < (height + 1) / 2; ++y) {
-    for (size_t x = 0; x < (width + 1) / 2; ++x) {
-      dstU[x] = (uint8_t)(srcU[x] >> 2);
-      dstV[x] = (uint8_t)(srcV[x] >> 2);
-    }
-
-    srcU += srcUStride;
-    srcV += srcVStride;
-    dstU += dstUVStride;
-    dstV += dstUVStride;
-  }
 }
 
 bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
@@ -651,6 +621,7 @@
     }
   }
 
+  getVuiParams(buffer);
   if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
         buffer->image_format == libgav1::kImageFormatMonochrome400)) {
     ALOGE("image_format %d not supported", buffer->image_format);
@@ -666,22 +637,42 @@
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
   if (buffer->bitdepth == 10) {
     IntfImpl::Lock lock = mIntf->lock();
-    std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
-        mIntf->getDefaultColorAspects_l();
-
-    if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
-        defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
-        defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
-      if (buffer->image_format != libgav1::kImageFormatYuv420) {
+    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects =
+        mIntf->getColorAspects_l();
+    bool allowRGBA1010102 = false;
+    if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+        codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+        codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+      allowRGBA1010102 = true;
+    }
+    format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+    if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
+        (buffer->image_format != libgav1::kImageFormatYuv420)) {
         ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
-        mSignalledError = true;
-        work->result = C2_OMITTED;
-        work->workletsProcessed = 1u;
-        return false;
-      }
-      format = HAL_PIXEL_FORMAT_RGBA_1010102;
+      mSignalledError = true;
+      work->result = C2_OMITTED;
+      work->workletsProcessed = 1u;
+      return false;
     }
   }
+
+  if (mHalPixelFormat != format) {
+    C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
+    if (err == C2_OK) {
+      work->worklets.front()->output.configUpdate.push_back(
+          C2Param::Copy(pixelFormat));
+    } else {
+      ALOGE("Config update pixelFormat failed");
+      mSignalledError = true;
+      work->workletsProcessed = 1u;
+      work->result = C2_CORRUPTED;
+      return false;
+    }
+    mHalPixelFormat = format;
+  }
+
   C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
 
   c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format,
@@ -721,22 +712,24 @@
     const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
 
     if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
-      convertYUV420Planar16ToY410(
-          (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
-          srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
+        convertYUV420Planar16ToY410OrRGBA1010102((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
+                                                 srcUStride / 2, srcVStride / 2,
+                                                 dstYStride / sizeof(uint32_t), mWidth, mHeight);
+    } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+        convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
+                                    srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
+                                    dstUVStride / 2, mWidth, mHeight, isMonochrome);
     } else {
-      convertYUV420Planar16ToYUV420Planar(
-          dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
-          srcVStride / 2, dstYStride, dstUVStride, mWidth, mHeight,
-          isMonochrome);
+        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
+                                    srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride, mWidth,
+                                    mHeight, isMonochrome);
     }
   } else {
     const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
     const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
     const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
-    copyOutputBufferToYV12Frame(
-        dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
-        dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
+    convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                               srcVStride, dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
   }
   finishWork(buffer->user_private_data, work, std::move(block));
   block = nullptr;
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index 555adc9..a69a863 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -17,7 +17,10 @@
 #ifndef ANDROID_C2_SOFT_GAV1_DEC_H_
 #define ANDROID_C2_SOFT_GAV1_DEC_H_
 
+#include <media/stagefright/foundation/ColorUtils.h>
+
 #include <SimpleC2Component.h>
+#include <C2Config.h>
 #include "libgav1/src/gav1/decoder.h"
 #include "libgav1/src/gav1/decoder_settings.h"
 
@@ -51,15 +54,38 @@
   std::shared_ptr<IntfImpl> mIntf;
   std::unique_ptr<libgav1::Decoder> mCodecCtx;
 
+  uint32_t mHalPixelFormat;
   uint32_t mWidth;
   uint32_t mHeight;
   bool mSignalledOutputEos;
   bool mSignalledError;
 
+  // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+  // converting them to C2 values for each frame
+  struct VuiColorAspects {
+      uint8_t primaries;
+      uint8_t transfer;
+      uint8_t coeffs;
+      uint8_t fullRange;
+
+      // default color aspects
+      VuiColorAspects()
+          : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+            transfer(C2Color::TRANSFER_UNSPECIFIED),
+            coeffs(C2Color::MATRIX_UNSPECIFIED),
+            fullRange(C2Color::RANGE_UNSPECIFIED) { }
+
+      bool operator==(const VuiColorAspects &o) {
+          return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+                  && fullRange == o.fullRange;
+      }
+  } mBitstreamColorAspects;
+
   struct timeval mTimeStart;  // Time at the start of decode()
   struct timeval mTimeEnd;    // Time at the end of decode()
 
   bool initDecoder();
+  void getVuiParams(const libgav1::DecoderBuffer *buffer);
   void destroyDecoder();
   void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
                   const std::shared_ptr<C2GraphicBlock>& block);
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 6bcf3a2..5f5b2ef 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -502,7 +502,7 @@
 status_t C2SoftHevcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN32(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -768,20 +768,20 @@
         return C2_CORRUPTED;
     }
     if (mOutBlock &&
-            (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+            (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
         mOutBlock.reset();
     }
     if (!mOutBlock) {
         uint32_t format = HAL_PIXEL_FORMAT_YV12;
         C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
         c2_status_t err =
-            pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+            pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
         if (err != C2_OK) {
             ALOGE("fetchGraphicBlock for Output failed with status %d", err);
             return err;
         }
         ALOGV("provided (%dx%d) required (%dx%d)",
-              mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+              mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
     }
 
     return C2_OK;
@@ -917,7 +917,7 @@
         if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN32(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN128(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
             }
             if (ps_decode_op->u4_pic_wd != mWidth ||  ps_decode_op->u4_pic_ht != mHeight) {
                 mWidth = ps_decode_op->u4_pic_wd;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index b9b0a48..b9296e9 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -36,7 +36,7 @@
 #define ivdext_ctl_set_num_cores_op_t   ihevcd_cxa_ctl_set_num_cores_op_t
 #define ivdext_ctl_get_vui_params_ip_t  ihevcd_cxa_ctl_get_vui_params_ip_t
 #define ivdext_ctl_get_vui_params_op_t  ihevcd_cxa_ctl_get_vui_params_op_t
-#define ALIGN32(x)                      ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x)                     ((((x) + 127) >> 7) << 7)
 #define MAX_NUM_CORES                   4
 #define IVDEXT_CMD_CTL_SET_NUM_CORES    \
         (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 4bc1777..4f5caec 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -123,7 +123,7 @@
         // matches size limits in codec library
         addParameter(
             DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
-                .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+                .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
                 .withFields({
                     C2F(mSize, width).inRange(2, 1920, 2),
                     C2F(mSize, height).inRange(2, 1088, 2),
@@ -133,7 +133,7 @@
 
         addParameter(
             DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
-                .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+                .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
                 .withFields({C2F(mFrameRate, value).greaterThan(0.)})
                 .withSetter(
                     Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
@@ -245,6 +245,19 @@
                 })
                 .withSetter(CodedColorAspectsSetter, mColorAspects)
                 .build());
+
+        addParameter(
+                DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+                .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+                        0 /* flexCount */, 0u /* stream */))
+                .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+                                {C2Config::picture_type_t(I_FRAME),
+                                  C2Config::picture_type_t(P_FRAME),
+                                  C2Config::picture_type_t(B_FRAME)}),
+                             C2F(mPictureQuantization, m.values[0].min).any(),
+                             C2F(mPictureQuantization, m.values[0].max).any()})
+                .withSetter(PictureQuantizationSetter)
+                .build());
     }
 
     static C2R InputDelaySetter(
@@ -464,9 +477,69 @@
         me.set().matrix = coded.v.matrix;
         return C2R::Ok();
     }
+    static C2R PictureQuantizationSetter(bool mayBlock,
+                                         C2P<C2StreamPictureQuantizationTuning::output> &me) {
+        (void)mayBlock;
+
+        // these are the ones we're going to set, so we want them to default
+        // to the DEFAULT values for the codec
+        int32_t iMin = HEVC_QP_MIN, pMin = HEVC_QP_MIN, bMin = HEVC_QP_MIN;
+        int32_t iMax = HEVC_QP_MAX, pMax = HEVC_QP_MAX, bMax = HEVC_QP_MAX;
+
+        for (size_t i = 0; i < me.v.flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+            // layerMin is clamped to [HEVC_QP_MIN, layerMax] to avoid error
+            // cases where layer.min > layer.max
+            int32_t layerMax = std::clamp(layer.max, HEVC_QP_MIN, HEVC_QP_MAX);
+            int32_t layerMin = std::clamp(layer.min, HEVC_QP_MIN, layerMax);
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+                iMax = layerMax;
+                iMin = layerMin;
+                ALOGV("iMin %d iMax %d", iMin, iMax);
+            } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+                pMax = layerMax;
+                pMin = layerMin;
+                ALOGV("pMin %d pMax %d", pMin, pMax);
+            } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+                bMax = layerMax;
+                bMin = layerMin;
+                ALOGV("bMin %d bMax %d", bMin, bMax);
+            }
+        }
+
+        ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d b %d-%d",
+              iMin, iMax, pMin, pMax, bMin, bMax);
+
+        int32_t maxFrameQP = std::min(std::min(iMax, pMax), bMax);
+        int32_t minFrameQP = std::max(std::max(iMin, pMin), bMin);
+        if (minFrameQP > maxFrameQP) {
+            minFrameQP = maxFrameQP;
+        }
+
+        // put them back into the structure
+        for (size_t i = 0; i < me.v.flexCount(); ++i) {
+            const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+            if (layer.type_ == C2Config::picture_type_t(I_FRAME) ||
+                layer.type_ == C2Config::picture_type_t(P_FRAME) ||
+                layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+                me.set().m.values[i].max = maxFrameQP;
+                me.set().m.values[i].min = minFrameQP;
+            }
+        }
+
+        ALOGV("PictureQuantizationSetter(exit): i = p = b = %d-%d",
+              minFrameQP, maxFrameQP);
+
+        return C2R::Ok();
+    }
     std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() {
         return mCodedColorAspects;
     }
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+        return mPictureQuantization;
+    }
 
    private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -482,6 +555,7 @@
     std::shared_ptr<C2StreamGopTuning::output> mGop;
     std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
 };
 
 static size_t GetCPUCoreCount() {
@@ -654,12 +728,41 @@
         mEncParams.s_coding_tools_prms.i4_max_temporal_layers = 3;
     }
 
-    switch (mBitrateMode->value) {
-        case C2Config::BITRATE_IGNORE:
-            mEncParams.s_config_prms.i4_rate_control_mode = 3;
-            mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
-                getQpFromQuality(mQuality->value);
+    // we resolved out-of-bound and unspecified values in PictureQuantizationSetter()
+    // so we can start with defaults that are overridden as needed.
+    int32_t maxFrameQP = mEncParams.s_config_prms.i4_max_frame_qp;
+    int32_t minFrameQP = mEncParams.s_config_prms.i4_min_frame_qp;
+
+    for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+        const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+
+        // no need to loop: the hevc library takes the same range for I/P/B picture types
+        if (layer.type_ == C2Config::picture_type_t(I_FRAME) ||
+            layer.type_ == C2Config::picture_type_t(P_FRAME) ||
+            layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+
+            maxFrameQP = layer.max;
+            minFrameQP = layer.min;
             break;
+        }
+    }
+    mEncParams.s_config_prms.i4_max_frame_qp = maxFrameQP;
+    mEncParams.s_config_prms.i4_min_frame_qp = minFrameQP;
+
+    ALOGV("MaxFrameQp: %d MinFrameQp: %d", maxFrameQP, minFrameQP);
+
+    mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
+        std::clamp(kDefaultInitQP, minFrameQP, maxFrameQP);
+
+    switch (mBitrateMode->value) {
+        case C2Config::BITRATE_IGNORE: {
+            mEncParams.s_config_prms.i4_rate_control_mode = 3;
+            // ensure initial qp values are within our newly configured bounds
+            int32_t frameQp = getQpFromQuality(mQuality->value);
+            mEncParams.s_tgt_lyr_prms.as_tgt_params[0].ai4_frame_qp[0] =
+                std::clamp(frameQp, minFrameQP, maxFrameQP);
+            break;
+        }
         case C2Config::BITRATE_CONST:
             mEncParams.s_config_prms.i4_rate_control_mode = 5;
             break;
@@ -723,6 +826,7 @@
         mGop = mIntf->getGop_l();
         mRequestSync = mIntf->getRequestSync_l();
         mColorAspects = mIntf->getCodedColorAspects_l();
+        mQpBounds = mIntf->getPictureQuantization_l();
     }
 
     c2_status_t status = initEncParams();
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.h b/media/codec2/components/hevc/C2SoftHevcEnc.h
index 9dbf682..4217a8b 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.h
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.h
@@ -42,6 +42,11 @@
 #define DEFAULT_B_FRAMES     0
 #define DEFAULT_RC_LOOKAHEAD 0
 
+#define HEVC_QP_MIN 1
+#define HEVC_QP_MAX 51
+
+constexpr int32_t kDefaultInitQP = 32;
+
 struct C2SoftHevcEnc : public SimpleC2Component {
     class IntfImpl;
 
@@ -90,6 +95,7 @@
     std::shared_ptr<C2StreamGopTuning::output> mGop;
     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
     std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
 #ifdef FILE_DUMP_ENABLE
     char mInFile[200];
     char mOutFile[200];
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index e394670..149c6ee 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -321,6 +321,13 @@
     return C2_OK;
 }
 
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+    work->worklets.front()->output.flags = work->input.flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
 // TODO: Can overall error checking be improved? As in the check for validity of
 //       work, pool ptr, work->input.buffers.size() == 1, ...
 // TODO: Blind removal of 529 samples from the output may not work. Because
@@ -486,17 +493,17 @@
         }
     }
 
-    int64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
-    mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
-    ALOGV("out buffer attr. offset %d size %d timestamp %" PRId64 " ", outOffset,
-          outSize - outOffset, mAnchorTimeStamp + outTimeStamp);
-    decodedSizes.clear();
-    work->worklets.front()->output.flags = work->input.flags;
-    work->worklets.front()->output.buffers.clear();
-    work->worklets.front()->output.buffers.push_back(
-            createLinearBuffer(block, outOffset, outSize - outOffset));
-    work->worklets.front()->output.ordinal = work->input.ordinal;
-    work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+    fillEmptyWork(work);
+    if (samplingRate && numChannels) {
+        int64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
+        mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
+        ALOGV("out buffer attr. offset %d size %d timestamp %" PRId64 " ", outOffset,
+               outSize - outOffset, mAnchorTimeStamp + outTimeStamp);
+        decodedSizes.clear();
+        work->worklets.front()->output.buffers.push_back(
+                createLinearBuffer(block, outOffset, outSize - outOffset));
+        work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp;
+    }
     if (eos) {
         mSignalledOutputEos = true;
         ALOGV("signalled EOS");
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index b1cf388..5f9b30b 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -572,7 +572,7 @@
     if (OK != createDecoder()) return UNKNOWN_ERROR;
 
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN32(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -845,20 +845,20 @@
         return C2_CORRUPTED;
     }
     if (mOutBlock &&
-            (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+            (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
         mOutBlock.reset();
     }
     if (!mOutBlock) {
         uint32_t format = HAL_PIXEL_FORMAT_YV12;
         C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
         c2_status_t err =
-            pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+            pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
         if (err != C2_OK) {
             ALOGE("fetchGraphicBlock for Output failed with status %d", err);
             return err;
         }
         ALOGV("provided (%dx%d) required (%dx%d)",
-              mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+              mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
     }
 
     return C2_OK;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index fd66304a..8a29c14 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -37,7 +37,7 @@
 #define ivdext_ctl_set_num_cores_op_t   impeg2d_ctl_set_num_cores_op_t
 #define ivdext_ctl_get_seq_info_ip_t    impeg2d_ctl_get_seq_info_ip_t
 #define ivdext_ctl_get_seq_info_op_t    impeg2d_ctl_get_seq_info_op_t
-#define ALIGN32(x)                      ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x)                     ((((x) + 127) >> 7) << 7)
 #define MAX_NUM_CORES                   4
 #define IVDEXT_CMD_CTL_SET_NUM_CORES    \
         (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index ddd312f..54a1d0e 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -228,7 +228,6 @@
         const std::shared_ptr<IntfImpl> &intfImpl)
     : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
       mIntf(intfImpl),
-      mDecHandle(nullptr),
       mOutputBuffer{},
       mInitialized(false) {
 }
@@ -244,9 +243,7 @@
 
 c2_status_t C2SoftMpeg4Dec::onStop() {
     if (mInitialized) {
-        if (mDecHandle) {
-            PVCleanUpVideoDecoder(mDecHandle);
-        }
+        PVCleanUpVideoDecoder(&mVideoDecControls);
         mInitialized = false;
     }
     for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
@@ -269,28 +266,15 @@
 }
 
 void C2SoftMpeg4Dec::onRelease() {
-    if (mInitialized) {
-        if (mDecHandle) {
-            PVCleanUpVideoDecoder(mDecHandle);
-            delete mDecHandle;
-            mDecHandle = nullptr;
-        }
-        mInitialized = false;
-    }
+    (void)onStop();
     if (mOutBlock) {
         mOutBlock.reset();
     }
-    for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
-        if (mOutputBuffer[i]) {
-            free(mOutputBuffer[i]);
-            mOutputBuffer[i] = nullptr;
-        }
-    }
 }
 
 c2_status_t C2SoftMpeg4Dec::onFlush_sm() {
     if (mInitialized) {
-        if (PV_TRUE != PVResetVideoDecoder(mDecHandle)) {
+        if (PV_TRUE != PVResetVideoDecoder(&mVideoDecControls)) {
             return C2_CORRUPTED;
         }
     }
@@ -305,14 +289,8 @@
 #else
     mIsMpeg4 = false;
 #endif
-    if (!mDecHandle) {
-        mDecHandle = new tagvideoDecControls;
-    }
-    if (!mDecHandle) {
-        ALOGE("mDecHandle is null");
-        return NO_MEMORY;
-    }
-    memset(mDecHandle, 0, sizeof(tagvideoDecControls));
+
+    memset(&mVideoDecControls, 0, sizeof(tagvideoDecControls));
 
     /* TODO: bring these values to 352 and 288. It cannot be done as of now
      * because, h263 doesn't seem to allow port reconfiguration. In OMX, the
@@ -368,10 +346,6 @@
 }
 
 c2_status_t C2SoftMpeg4Dec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
-    if (!mDecHandle) {
-        ALOGE("not supposed to be here, invalid decoder context");
-        return C2_CORRUPTED;
-    }
 
     mOutputBufferSize = align(mIntf->getMaxWidth(), 16) * align(mIntf->getMaxHeight(), 16) * 3 / 2;
     for (int32_t i = 0; i < kNumOutputBuffers; ++i) {
@@ -402,10 +376,10 @@
 
 bool C2SoftMpeg4Dec::handleResChange(const std::unique_ptr<C2Work> &work) {
     uint32_t disp_width, disp_height;
-    PVGetVideoDimensions(mDecHandle, (int32 *)&disp_width, (int32 *)&disp_height);
+    PVGetVideoDimensions(&mVideoDecControls, (int32 *)&disp_width, (int32 *)&disp_height);
 
     uint32_t buf_width, buf_height;
-    PVGetBufferDimensions(mDecHandle, (int32 *)&buf_width, (int32 *)&buf_height);
+    PVGetBufferDimensions(&mVideoDecControls, (int32 *)&buf_width, (int32 *)&buf_height);
 
     CHECK_LE(disp_width, buf_width);
     CHECK_LE(disp_height, buf_height);
@@ -426,13 +400,14 @@
         }
 
         if (!mIsMpeg4) {
-            PVCleanUpVideoDecoder(mDecHandle);
+            PVCleanUpVideoDecoder(&mVideoDecControls);
 
             uint8_t *vol_data[1]{};
             int32_t vol_size = 0;
 
             if (!PVInitVideoDecoder(
-                    mDecHandle, vol_data, &vol_size, 1, mIntf->getMaxWidth(), mIntf->getMaxHeight(), H263_MODE)) {
+                    &mVideoDecControls, vol_data, &vol_size, 1, mIntf->getMaxWidth(),
+                                                        mIntf->getMaxHeight(), H263_MODE)) {
                 ALOGE("Error in PVInitVideoDecoder H263_MODE while resChanged was set to true");
                 mSignalledError = true;
                 work->result = C2_CORRUPTED;
@@ -444,40 +419,6 @@
     return resChanged;
 }
 
-/* TODO: can remove temporary copy after library supports writing to display
- * buffer Y, U and V plane pointers using stride info. */
-static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, uint8_t *src,
-        size_t dstYStride, size_t dstUVStride,
-        size_t srcYStride, uint32_t width,
-        uint32_t height) {
-    size_t srcUVStride = srcYStride / 2;
-    uint8_t *srcStart = src;
-
-    size_t vStride = align(height, 16);
-    for (size_t i = 0; i < height; ++i) {
-         memcpy(dstY, src, width);
-         src += srcYStride;
-         dstY += dstYStride;
-    }
-
-    /* U buffer */
-    src = srcStart + vStride * srcYStride;
-    for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dstU, src, width / 2);
-         src += srcUVStride;
-         dstU += dstUVStride;
-    }
-
-    /* V buffer */
-    src = srcStart + vStride * srcYStride * 5 / 4;
-    for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dstV, src, width / 2);
-         src += srcUVStride;
-         dstV += dstUVStride;
-    }
-}
-
 void C2SoftMpeg4Dec::process(
         const std::unique_ptr<C2Work> &work,
         const std::shared_ptr<C2BlockPool> &pool) {
@@ -522,7 +463,7 @@
     uint32_t *start_code = (uint32_t *)bitstream;
     bool volHeader = *start_code == 0xB0010000;
     if (volHeader) {
-        PVCleanUpVideoDecoder(mDecHandle);
+        PVCleanUpVideoDecoder(&mVideoDecControls);
         mInitialized = false;
     }
 
@@ -537,7 +478,7 @@
         }
         MP4DecodingMode mode = (mIsMpeg4) ? MPEG4_MODE : H263_MODE;
         if (!PVInitVideoDecoder(
-                mDecHandle, vol_data, &vol_size, 1,
+                &mVideoDecControls, vol_data, &vol_size, 1,
                 mIntf->getMaxWidth(), mIntf->getMaxHeight(), mode)) {
             ALOGE("PVInitVideoDecoder failed. Unsupported content?");
             mSignalledError = true;
@@ -545,7 +486,7 @@
             return;
         }
         mInitialized = true;
-        MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle);
+        MP4DecodingMode actualMode = PVGetDecBitstreamMode(&mVideoDecControls);
         if (mode != actualMode) {
             ALOGE("Decoded mode not same as actual mode of the decoder");
             mSignalledError = true;
@@ -553,7 +494,7 @@
             return;
         }
 
-        PVSetPostProcType(mDecHandle, 0);
+        PVSetPostProcType(&mVideoDecControls, 0);
         if (handleResChange(work)) {
             ALOGI("Setting width and height");
             C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
@@ -590,7 +531,7 @@
             return;
         }
 
-        uint32_t yFrameSize = sizeof(uint8) * mDecHandle->size;
+        uint32_t yFrameSize = sizeof(uint8) * mVideoDecControls.size;
         if (mOutputBufferSize < yFrameSize * 3 / 2){
             ALOGE("Too small output buffer: %zu bytes", mOutputBufferSize);
             mSignalledError = true;
@@ -599,7 +540,7 @@
         }
 
         if (!mFramesConfigured) {
-            PVSetReferenceYUV(mDecHandle,mOutputBuffer[1]);
+            PVSetReferenceYUV(&mVideoDecControls,mOutputBuffer[1]);
             mFramesConfigured = true;
         }
 
@@ -610,7 +551,7 @@
         uint8_t *bitstreamTmp = bitstream;
         uint32_t timestamp = workIndex;
         if (PVDecodeVopHeader(
-                    mDecHandle, &bitstreamTmp, &timestamp, &tmpInSize,
+                    &mVideoDecControls, &bitstreamTmp, &timestamp, &tmpInSize,
                     &header_info, &useExtTimestamp,
                     mOutputBuffer[mNumSamplesOutput & 1]) != PV_TRUE) {
             ALOGE("failed to decode vop header.");
@@ -642,7 +583,7 @@
             continue;
         }
 
-        if (PVDecodeVopBody(mDecHandle, &tmpInSize) != PV_TRUE) {
+        if (PVDecodeVopBody(&mVideoDecControls, &tmpInSize) != PV_TRUE) {
             ALOGE("failed to decode video frame.");
             mSignalledError = true;
             work->result = C2_CORRUPTED;
@@ -661,11 +602,17 @@
         C2PlanarLayout layout = wView.layout();
         size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
         size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
-        (void)copyOutputBufferToYuvPlanarFrame(
-                outputBufferY, outputBufferU, outputBufferV,
-                mOutputBuffer[mNumSamplesOutput & 1],
-                dstYStride, dstUVStride,
-                align(mWidth, 16), mWidth, mHeight);
+        size_t srcYStride = align(mWidth, 16);
+        size_t srcUStride = srcYStride / 2;
+        size_t srcVStride = srcYStride / 2;
+        size_t vStride = align(mHeight, 16);
+        const uint8_t *srcY = (const uint8_t *)mOutputBuffer[mNumSamplesOutput & 1];
+        const uint8_t *srcU = (const uint8_t *)srcY + vStride * srcYStride;
+        const uint8_t *srcV = (const uint8_t *)srcY + vStride * srcYStride * 5 / 4;
+
+        convertYUV420Planar8ToYV12(outputBufferY, outputBufferU, outputBufferV, srcY, srcU, srcV,
+                                   srcYStride, srcUStride, srcVStride, dstYStride, dstUVStride,
+                                   mWidth, mHeight);
 
         inPos += inSize - (size_t)tmpInSize;
         finishWork(workIndex, work);
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
index 716a095..fed04c9 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h
@@ -19,8 +19,8 @@
 
 #include <SimpleC2Component.h>
 
+#include <mp4dec_api.h>
 
-struct tagvideoDecControls;
 
 namespace android {
 
@@ -54,7 +54,7 @@
     bool handleResChange(const std::unique_ptr<C2Work> &work);
 
     std::shared_ptr<IntfImpl> mIntf;
-    tagvideoDecControls *mDecHandle;
+    tagvideoDecControls mVideoDecControls;
     std::shared_ptr<C2GraphicBlock> mOutBlock;
     uint8_t *mOutputBuffer[kNumOutputBuffers];
     size_t  mOutputBufferSize;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 3c87531..3bfec66 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -436,16 +436,18 @@
         }
 
         ++mNumInputFrames;
-        std::unique_ptr<C2StreamInitDataInfo::output> csd =
-            C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
-        if (!csd) {
-            ALOGE("CSD allocation failed");
-            mSignalledError = true;
-            work->result = C2_NO_MEMORY;
-            return;
+        if (outputSize) {
+            std::unique_ptr<C2StreamInitDataInfo::output> csd =
+                C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
+            if (!csd) {
+                ALOGE("CSD allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            memcpy(csd->m.value, outPtr, outputSize);
+            work->worklets.front()->output.configUpdate.push_back(std::move(csd));
         }
-        memcpy(csd->m.value, outPtr, outputSize);
-        work->worklets.front()->output.configUpdate.push_back(std::move(csd));
     }
 
     // handle dynamic bitrate change
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index 370d33c..cdc3be0 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -245,7 +245,7 @@
     mIsFirstFrame = true;
     mEncoderFlushed = false;
     mBufferAvailable = false;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0;
     mFilledLen = 0;
     mFrameDurationMs = DEFAULT_FRAME_DURATION_MS;
@@ -266,7 +266,7 @@
     mIsFirstFrame = true;
     mEncoderFlushed = false;
     mBufferAvailable = false;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mFilledLen = 0;
     if (mEncoder) {
@@ -363,7 +363,7 @@
         }
     }
     if (mIsFirstFrame && inSize > 0) {
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
         mIsFirstFrame = false;
     }
 
@@ -386,7 +386,7 @@
     size_t inPos = 0;
     size_t processSize = 0;
     mBytesEncoded = 0;
-    uint64_t outTimeStamp = 0u;
+    int64_t outTimeStamp = 0;
     std::shared_ptr<C2Buffer> buffer;
     uint64_t inputIndex = work->input.ordinal.frameIndex.peeku();
     const uint8_t* inPtr = rView.data() + inOffset;
@@ -584,7 +584,7 @@
         mOutputBlock.reset();
     }
     mProcessedSamples += (mNumPcmBytesPerInputFrame / sizeof(int16_t));
-    uint64_t outTimeStamp =
+    int64_t outTimeStamp =
         mProcessedSamples * 1000000ll / mChannelCount / mSampleRate;
     outOrdinal.frameIndex = mOutIndex++;
     outOrdinal.timestamp = mAnchorTimeStamp + outTimeStamp;
@@ -612,7 +612,7 @@
         return C2_OMITTED;
     }
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     return drainInternal(pool, nullptr);
 }
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
index 2b4d8f2..733a6bcd2 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.h
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -67,7 +67,7 @@
     uint32_t mSampleRate;
     uint32_t mChannelCount;
     uint32_t mFrameDurationMs;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     // Codec delay in ns
     uint64_t mCodecDelay;
diff --git a/media/codec2/components/tests/Android.bp b/media/codec2/components/tests/Android.bp
index 3c68eee..be2abf2 100644
--- a/media/codec2/components/tests/Android.bp
+++ b/media/codec2/components/tests/Android.bp
@@ -9,44 +9,13 @@
 
 cc_defaults {
     name: "C2SoftCodecTest-defaults",
+    defaults: [ "libcodec2-static-defaults" ],
     gtest: true,
     host_supported: false,
     srcs: [
         "C2SoftCodecTest.cpp",
     ],
 
-    static_libs: [
-        "liblog",
-        "libion",
-        "libfmq",
-        "libbase",
-        "libutils",
-        "libcutils",
-        "libcodec2",
-        "libhidlbase",
-        "libdmabufheap",
-        "libcodec2_vndk",
-        "libnativewindow",
-        "libcodec2_soft_common",
-        "libsfplugin_ccodec_utils",
-        "libstagefright_foundation",
-        "libstagefright_bufferpool@2.0.1",
-        "android.hardware.graphics.mapper@2.0",
-        "android.hardware.graphics.mapper@3.0",
-        "android.hardware.media.bufferpool@2.0",
-        "android.hardware.graphics.allocator@2.0",
-        "android.hardware.graphics.allocator@3.0",
-        "android.hardware.graphics.bufferqueue@2.0",
-    ],
-
-    shared_libs: [
-        "libui",
-        "libdl",
-        "libhardware",
-        "libvndksupport",
-        "libprocessgroup",
-    ],
-
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 2953d90..c2ccfa0 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -19,12 +19,12 @@
 #include <log/log.h>
 
 #include <algorithm>
-
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
 #include <SimpleC2Interface.h>
 
 #include "C2SoftVpxDec.h"
@@ -149,8 +149,16 @@
 #else
         addParameter(
                 DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
-                .withConstValue(new C2StreamProfileLevelInfo::input(0u,
-                        C2Config::PROFILE_UNUSED, C2Config::LEVEL_UNUSED))
+                .withDefault(new C2StreamProfileLevelInfo::input(0u,
+                        C2Config::PROFILE_VP8_0, C2Config::LEVEL_UNUSED))
+                .withFields({
+                    C2F(mProfileLevel, profile).equalTo(
+                        PROFILE_VP8_0
+                    ),
+                    C2F(mProfileLevel, level).equalTo(
+                        LEVEL_UNUSED),
+                })
+                .withSetter(ProfileLevelSetter, mSize)
                 .build());
 #endif
 
@@ -209,11 +217,20 @@
                 .build());
 
         // TODO: support more formats?
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+#ifdef VP9
+        if (isAtLeastT()) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+#endif
         addParameter(
                 DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
-                .withConstValue(new C2StreamPixelFormatInfo::output(
-                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                .withDefault(new C2StreamPixelFormatInfo::output(
+                                  0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
                 .build());
+
     }
 
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
@@ -415,7 +432,7 @@
 #else
     mMode = MODE_VP8;
 #endif
-
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
     mWidth = 320;
     mHeight = 240;
     mFrameParallelMode = false;
@@ -630,125 +647,6 @@
     }
 }
 
-static void copyOutputBufferToYuvPlanarFrame(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride,
-        uint32_t width, uint32_t height) {
-
-    for (size_t i = 0; i < height; ++i) {
-         memcpy(dstY, srcY, width);
-         srcY += srcYStride;
-         dstY += dstYStride;
-    }
-
-    for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dstV, srcV, width / 2);
-         srcV += srcVStride;
-         dstV += dstUVStride;
-    }
-
-    for (size_t i = 0; i < height / 2; ++i) {
-         memcpy(dstU, srcU, width / 2);
-         srcU += srcUStride;
-         dstU += dstUVStride;
-    }
-
-}
-
-static void convertYUV420Planar16ToY410(uint32_t *dst,
-        const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstStride, size_t width, size_t height) {
-
-    // Converting two lines at a time, slightly faster
-    for (size_t y = 0; y < height; y += 2) {
-        uint32_t *dstTop = (uint32_t *) dst;
-        uint32_t *dstBot = (uint32_t *) (dst + dstStride);
-        uint16_t *ySrcTop = (uint16_t*) srcY;
-        uint16_t *ySrcBot = (uint16_t*) (srcY + srcYStride);
-        uint16_t *uSrc = (uint16_t*) srcU;
-        uint16_t *vSrc = (uint16_t*) srcV;
-
-        uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
-        size_t x = 0;
-        for (; x < width - 3; x += 4) {
-
-            u01 = *((uint32_t*)uSrc); uSrc += 2;
-            v01 = *((uint32_t*)vSrc); vSrc += 2;
-
-            y01 = *((uint32_t*)ySrcTop); ySrcTop += 2;
-            y23 = *((uint32_t*)ySrcTop); ySrcTop += 2;
-            y45 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-            y67 = *((uint32_t*)ySrcBot); ySrcBot += 2;
-
-            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-            uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
-
-            *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
-            *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
-            *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
-            *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
-
-            *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
-            *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
-            *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
-            *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
-        }
-
-        // There should be at most 2 more pixels to process. Note that we don't
-        // need to consider odd case as the buffer is always aligned to even.
-        if (x < width) {
-            u01 = *uSrc;
-            v01 = *vSrc;
-            y01 = *((uint32_t*)ySrcTop);
-            y45 = *((uint32_t*)ySrcBot);
-            uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
-            *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
-            *dstTop++ = ((y01 >> 16) << 10) | uv0;
-            *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
-            *dstBot++ = ((y45 >> 16) << 10) | uv0;
-        }
-
-        srcY += srcYStride * 2;
-        srcU += srcUStride;
-        srcV += srcVStride;
-        dst += dstStride * 2;
-    }
-
-    return;
-}
-
-static void convertYUV420Planar16ToYUV420Planar(
-        uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
-        const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
-        size_t srcYStride, size_t srcUStride, size_t srcVStride,
-        size_t dstYStride, size_t dstUVStride,
-        size_t width, size_t height) {
-
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; ++x) {
-            dstY[x] = (uint8_t)(srcY[x] >> 2);
-        }
-
-        srcY += srcYStride;
-        dstY += dstYStride;
-    }
-
-    for (size_t y = 0; y < (height + 1) / 2; ++y) {
-        for (size_t x = 0; x < (width + 1) / 2; ++x) {
-            dstU[x] = (uint8_t)(srcU[x] >> 2);
-            dstV[x] = (uint8_t)(srcV[x] >> 2);
-        }
-
-        srcU += srcUStride;
-        srcV += srcVStride;
-        dstU += dstUVStride;
-        dstV += dstUVStride;
-    }
-    return;
-}
 status_t C2SoftVpxDec::outputBuffer(
         const std::shared_ptr<C2BlockPool> &pool,
         const std::unique_ptr<C2Work> &work)
@@ -792,13 +690,32 @@
     if (img->fmt == VPX_IMG_FMT_I42016) {
         IntfImpl::Lock lock = mIntf->lock();
         std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects = mIntf->getDefaultColorAspects_l();
-
+        bool allowRGBA1010102 = false;
         if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
             defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
             defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
-            format = HAL_PIXEL_FORMAT_RGBA_1010102;
+            allowRGBA1010102 = true;
         }
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
     }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(
+                C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return UNKNOWN_ERROR;
+        }
+        mHalPixelFormat = format;
+    }
+
     C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
     c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &block);
     if (err != C2_OK) {
@@ -843,7 +760,7 @@
                         [dstY, srcY, srcU, srcV,
                          srcYStride, srcUStride, srcVStride, dstYStride,
                          width = mWidth, height = std::min(mHeight - i, kHeight)] {
-                            convertYUV420Planar16ToY410(
+                            convertYUV420Planar16ToY410OrRGBA1010102(
                                     (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
                                     srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
                                     width, height);
@@ -859,24 +776,22 @@
                 queue->cond.signal();
                 queue.waitForCondition(queue->cond);
             }
+        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+            convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
+                                        srcYStride / 2, srcUStride / 2, srcVStride / 2,
+                                        dstYStride / 2, dstUVStride / 2, mWidth, mHeight);
         } else {
-            convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
-                                                srcY, srcU, srcV,
-                                                srcYStride / 2, srcUStride / 2, srcVStride / 2,
-                                                dstYStride, dstUVStride,
-                                                mWidth, mHeight);
+            convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
+                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+                                        mWidth, mHeight);
         }
     } else {
         const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y];
         const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
 
-        copyOutputBufferToYuvPlanarFrame(
-                dstY, dstU, dstV,
-                srcY, srcU, srcV,
-                srcYStride, srcUStride, srcVStride,
-                dstYStride, dstUVStride,
-                mWidth, mHeight);
+        convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                   srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
     }
     finishWork(((c2_cntr64_t *)img->user_priv)->peekull(), work, std::move(block));
     return OK;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index 2065165..5564766 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -67,6 +67,7 @@
     vpx_codec_ctx_t *mCodecCtx;
     bool mFrameParallelMode;  // Frame parallel is only supported by VP9 decoder.
 
+    uint32_t mHalPixelFormat;
     uint32_t mWidth;
     uint32_t mHeight;
     bool mSignalledOutputEos;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 7486d27..617769b 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -31,6 +31,255 @@
 
 namespace android {
 
+C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+    : SimpleInterface<void>::BaseParams(
+            helper,
+            COMPONENT_NAME,
+            C2Component::KIND_ENCODER,
+            C2Component::DOMAIN_VIDEO,
+            MEDIA_MIMETYPE_VIDEO) {
+    noPrivateBuffers(); // TODO: account for our buffers here
+    noInputReferences();
+    noOutputReferences();
+    noInputLatency();
+    noTimeStretch();
+    setDerivedInstance(this);
+
+    addParameter(
+            DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+            .withConstValue(new C2ComponentAttributesSetting(
+                C2Component::ATTRIB_IS_TEMPORAL))
+            .build());
+
+    addParameter(
+            DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
+            .withConstValue(new C2StreamUsageTuning::input(
+                    0u, (uint64_t)C2MemoryUsage::CPU_READ))
+            .build());
+
+    addParameter(
+        DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+            .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+            .withFields({
+                C2F(mSize, width).inRange(2, 2048, 2),
+                C2F(mSize, height).inRange(2, 2048, 2),
+            })
+            .withSetter(SizeSetter)
+            .build());
+
+    addParameter(
+        DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+            .withDefault(new C2StreamBitrateModeTuning::output(
+                    0u, C2Config::BITRATE_VARIABLE))
+            .withFields({
+                C2F(mBitrateMode, value).oneOf({
+                    C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
+            })
+            .withSetter(
+                Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+            .build());
+
+    addParameter(
+        DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
+            .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+            // TODO: More restriction?
+            .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+            .withSetter(
+                Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+            .build());
+
+    addParameter(
+        DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
+            .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
+            .withFields({
+                C2F(mLayering, m.layerCount).inRange(0, 4),
+                C2F(mLayering, m.bLayerCount).inRange(0, 0),
+                C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
+            })
+            .withSetter(LayeringSetter)
+            .build());
+
+    addParameter(
+            DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+            .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+            .withFields({C2F(mSyncFramePeriod, value).any()})
+            .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+            .build());
+
+    addParameter(
+        DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
+            .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
+            .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
+            .withSetter(BitrateSetter)
+            .build());
+
+    addParameter(
+            DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
+            .withConstValue(new C2StreamIntraRefreshTuning::output(
+                            0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
+            .build());
+#ifdef VP9
+    addParameter(
+            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+            .withDefault(new C2StreamProfileLevelInfo::output(
+                    0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
+            .withFields({
+                C2F(mProfileLevel, profile).equalTo(
+                    PROFILE_VP9_0
+                ),
+                C2F(mProfileLevel, level).equalTo(
+                    LEVEL_VP9_4_1),
+            })
+            .withSetter(ProfileLevelSetter)
+            .build());
+#else
+    addParameter(
+            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+            .withDefault(new C2StreamProfileLevelInfo::output(
+                    0u, PROFILE_VP8_0, LEVEL_UNUSED))
+            .withFields({
+                C2F(mProfileLevel, profile).equalTo(
+                    PROFILE_VP8_0
+                ),
+                C2F(mProfileLevel, level).equalTo(
+                    LEVEL_UNUSED),
+            })
+            .withSetter(ProfileLevelSetter)
+            .build());
+#endif
+    addParameter(
+            DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+            .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+            .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
+            .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+            .build());
+
+    addParameter(
+            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+            .withDefault(new C2StreamColorAspectsInfo::input(
+                    0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+            .withFields({
+                C2F(mColorAspects, range).inRange(
+                            C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                C2F(mColorAspects, primaries).inRange(
+                            C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                C2F(mColorAspects, transfer).inRange(
+                            C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                C2F(mColorAspects, matrix).inRange(
+                            C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+            })
+            .withSetter(ColorAspectsSetter)
+            .build());
+
+    addParameter(
+            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+            .withDefault(new C2StreamColorAspectsInfo::output(
+                    0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+            .withFields({
+                C2F(mCodedColorAspects, range).inRange(
+                            C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
+                C2F(mCodedColorAspects, primaries).inRange(
+                            C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+                C2F(mCodedColorAspects, transfer).inRange(
+                            C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
+                C2F(mCodedColorAspects, matrix).inRange(
+                            C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
+            })
+            .withSetter(CodedColorAspectsSetter, mColorAspects)
+            .build());
+}
+
+C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (me.v.value < 4096) {
+        me.set().value = 4096;
+    }
+    return res;
+}
+
+C2R C2SoftVpxEnc::IntfImpl::SizeSetter(bool mayBlock,
+                                       const C2P<C2StreamPictureSizeInfo::input>& oldMe,
+                                       C2P<C2StreamPictureSizeInfo::input>& me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+        me.set().width = oldMe.v.width;
+    }
+    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+        me.set().height = oldMe.v.height;
+    }
+    return res;
+}
+
+C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
+                                               C2P<C2StreamProfileLevelInfo::output>& me) {
+    (void)mayBlock;
+    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+        me.set().profile = PROFILE_VP9_0;
+    }
+    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+        me.set().level = LEVEL_VP9_4_1;
+    }
+    return C2R::Ok();
+}
+
+C2R C2SoftVpxEnc::IntfImpl::LayeringSetter(bool mayBlock,
+                                           C2P<C2StreamTemporalLayeringTuning::output>& me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (me.v.m.layerCount > 4) {
+        me.set().m.layerCount = 4;
+    }
+    me.set().m.bLayerCount = 0;
+    // ensure ratios are monotonic and clamped between 0 and 1
+    for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
+        me.set().m.bitrateRatios[ix] = c2_clamp(
+            ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
+    }
+    ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
+    return res;
+}
+
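+// Convert the configured sync-frame interval (in microseconds) into a period expressed in
+// frames at the current frame rate, clamped to at least one frame.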
+uint32_t C2SoftVpxEnc::IntfImpl::getSyncFramePeriod() const {
+    if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+        return 0;
+    }
+    double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+    return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+}
+C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
+                                               C2P<C2StreamColorAspectsInfo::input>& me) {
+    (void)mayBlock;
+    if (me.v.range > C2Color::RANGE_OTHER) {
+        me.set().range = C2Color::RANGE_OTHER;
+    }
+    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+        me.set().primaries = C2Color::PRIMARIES_OTHER;
+    }
+    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+        me.set().transfer = C2Color::TRANSFER_OTHER;
+    }
+    if (me.v.matrix > C2Color::MATRIX_OTHER) {
+        me.set().matrix = C2Color::MATRIX_OTHER;
+    }
+    return C2R::Ok();
+}
+C2R C2SoftVpxEnc::IntfImpl::CodedColorAspectsSetter(
+        bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+        const C2P<C2StreamColorAspectsInfo::input>& coded) {
+    (void)mayBlock;
+    me.set().range = coded.v.range;
+    me.set().primaries = coded.v.primaries;
+    me.set().transfer = coded.v.transfer;
+    me.set().matrix = coded.v.matrix;
+    return C2R::Ok();
+}
+
 #if 0
 static size_t getCpuCoreCount() {
     long cpuCoreCount = 1;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index c98b802..e296c8f 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -237,245 +237,38 @@
 
 class C2SoftVpxEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
    public:
-    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
-        : SimpleInterface<void>::BaseParams(
-                helper,
-                COMPONENT_NAME,
-                C2Component::KIND_ENCODER,
-                C2Component::DOMAIN_VIDEO,
-                MEDIA_MIMETYPE_VIDEO) {
-        noPrivateBuffers(); // TODO: account for our buffers here
-        noInputReferences();
-        noOutputReferences();
-        noInputLatency();
-        noTimeStretch();
-        setDerivedInstance(this);
-
-        addParameter(
-                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
-                .withConstValue(new C2ComponentAttributesSetting(
-                    C2Component::ATTRIB_IS_TEMPORAL))
-                .build());
-
-        addParameter(
-                DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
-                .withConstValue(new C2StreamUsageTuning::input(
-                        0u, (uint64_t)C2MemoryUsage::CPU_READ))
-                .build());
-
-        addParameter(
-            DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
-                .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
-                .withFields({
-                    C2F(mSize, width).inRange(2, 2048, 2),
-                    C2F(mSize, height).inRange(2, 2048, 2),
-                })
-                .withSetter(SizeSetter)
-                .build());
-
-        addParameter(
-            DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
-                .withDefault(new C2StreamBitrateModeTuning::output(
-                        0u, C2Config::BITRATE_VARIABLE))
-                .withFields({
-                    C2F(mBitrateMode, value).oneOf({
-                        C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
-                })
-                .withSetter(
-                    Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
-                .build());
-
-        addParameter(
-            DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
-                .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
-                // TODO: More restriction?
-                .withFields({C2F(mFrameRate, value).greaterThan(0.)})
-                .withSetter(
-                    Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
-                .build());
-
-        addParameter(
-            DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
-                .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
-                .withFields({
-                    C2F(mLayering, m.layerCount).inRange(0, 4),
-                    C2F(mLayering, m.bLayerCount).inRange(0, 0),
-                    C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
-                })
-                .withSetter(LayeringSetter)
-                .build());
-
-        addParameter(
-                DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
-                .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
-                .withFields({C2F(mSyncFramePeriod, value).any()})
-                .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
-                .build());
-
-        addParameter(
-            DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
-                .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
-                .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
-                .withSetter(BitrateSetter)
-                .build());
-
-        addParameter(
-                DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
-                .withConstValue(new C2StreamIntraRefreshTuning::output(
-                             0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
-                .build());
-
-        addParameter(
-        DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
-        .withDefault(new C2StreamProfileLevelInfo::output(
-                0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
-        .withFields({
-            C2F(mProfileLevel, profile).equalTo(
-                PROFILE_VP9_0
-            ),
-            C2F(mProfileLevel, level).equalTo(
-                LEVEL_VP9_4_1),
-        })
-        .withSetter(ProfileLevelSetter)
-        .build());
-
-        addParameter(
-                DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
-                .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
-                .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
-                .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
-                .build());
-
-        addParameter(
-                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
-                .withDefault(new C2StreamColorAspectsInfo::input(
-                        0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
-                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
-                .withFields({
-                    C2F(mColorAspects, range).inRange(
-                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
-                    C2F(mColorAspects, primaries).inRange(
-                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
-                    C2F(mColorAspects, transfer).inRange(
-                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
-                    C2F(mColorAspects, matrix).inRange(
-                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
-                })
-                .withSetter(ColorAspectsSetter)
-                .build());
-
-        addParameter(
-                DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
-                .withDefault(new C2StreamColorAspectsInfo::output(
-                        0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
-                        C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
-                .withFields({
-                    C2F(mCodedColorAspects, range).inRange(
-                                C2Color::RANGE_UNSPECIFIED,     C2Color::RANGE_OTHER),
-                    C2F(mCodedColorAspects, primaries).inRange(
-                                C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
-                    C2F(mCodedColorAspects, transfer).inRange(
-                                C2Color::TRANSFER_UNSPECIFIED,  C2Color::TRANSFER_OTHER),
-                    C2F(mCodedColorAspects, matrix).inRange(
-                                C2Color::MATRIX_UNSPECIFIED,    C2Color::MATRIX_OTHER)
-                })
-                .withSetter(CodedColorAspectsSetter, mColorAspects)
-                .build());
-    }
-
-    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
-        (void)mayBlock;
-        C2R res = C2R::Ok();
-        if (me.v.value <= 4096) {
-            me.set().value = 4096;
-        }
-        return res;
-    }
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper);
+    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me);
 
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
-                          C2P<C2StreamPictureSizeInfo::input> &me) {
-        (void)mayBlock;
-        C2R res = C2R::Ok();
-        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
-            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
-            me.set().width = oldMe.v.width;
-        }
-        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
-            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
-            me.set().height = oldMe.v.height;
-        }
-        return res;
-    }
+                          C2P<C2StreamPictureSizeInfo::input> &me);
 
     static C2R ProfileLevelSetter(
             bool mayBlock,
-            C2P<C2StreamProfileLevelInfo::output> &me) {
-        (void)mayBlock;
-        if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
-            me.set().profile = PROFILE_VP9_0;
-        }
-        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
-            me.set().level = LEVEL_VP9_4_1;
-        }
-        return C2R::Ok();
-    }
+            C2P<C2StreamProfileLevelInfo::output> &me);
 
-    static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me) {
-        (void)mayBlock;
-        C2R res = C2R::Ok();
-        if (me.v.m.layerCount > 4) {
-            me.set().m.layerCount = 4;
-        }
-        me.set().m.bLayerCount = 0;
-        // ensure ratios are monotonic and clamped between 0 and 1
-        for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
-            me.set().m.bitrateRatios[ix] = c2_clamp(
-                ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
-        }
-        ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
-        return res;
-    }
+    static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me);
 
     // unsafe getters
     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
-    std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
+    std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const {
+        return mIntraRefresh;
+    }
     std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
-    std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const { return mBitrateMode; }
-    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
-    std::shared_ptr<C2StreamTemporalLayeringTuning::output> getTemporalLayers_l() const { return mLayering; }
-    uint32_t getSyncFramePeriod() const {
-        if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
-            return 0;
-        }
-        double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
-        return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+    std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
+        return mBitrateMode;
     }
-    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
-        (void)mayBlock;
-        if (me.v.range > C2Color::RANGE_OTHER) {
-                me.set().range = C2Color::RANGE_OTHER;
-        }
-        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
-                me.set().primaries = C2Color::PRIMARIES_OTHER;
-        }
-        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
-                me.set().transfer = C2Color::TRANSFER_OTHER;
-        }
-        if (me.v.matrix > C2Color::MATRIX_OTHER) {
-                me.set().matrix = C2Color::MATRIX_OTHER;
-        }
-        return C2R::Ok();
+    std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const {
+        return mRequestSync;
     }
+    std::shared_ptr<C2StreamTemporalLayeringTuning::output> getTemporalLayers_l() const {
+        return mLayering;
+    }
+    uint32_t getSyncFramePeriod() const;
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me);
     static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
-                                       const C2P<C2StreamColorAspectsInfo::input> &coded) {
-        (void)mayBlock;
-        me.set().range = coded.v.range;
-        me.set().primaries = coded.v.primaries;
-        me.set().transfer = coded.v.transfer;
-        me.set().matrix = coded.v.matrix;
-        return C2R::Ok();
-    }
+                                       const C2P<C2StreamColorAspectsInfo::input> &coded);
 
    private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 2cc7ab7..70e742c 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -59,6 +59,7 @@
     enum drc_compression_mode_t : int32_t;  ///< DRC compression mode
     enum drc_effect_type_t : int32_t;       ///< DRC effect type
     enum drc_album_mode_t : int32_t;        ///< DRC album mode
+    enum hdr_dynamic_metadata_type_t : uint32_t;  ///< HDR dynamic metadata type
     enum intra_refresh_mode_t : uint32_t;   ///< intra refresh modes
     enum level_t : uint32_t;                ///< coding level
     enum ordinal_key_t : uint32_t;          ///< work ordering keys
@@ -189,10 +190,13 @@
 
     kParamIndexPictureTypeMask,
     kParamIndexPictureType,
+    // deprecated
     kParamIndexHdr10PlusMetadata,
 
     kParamIndexPictureQuantization,
 
+    kParamIndexHdrDynamicMetadata,
+
     /* ------------------------------------ video components ------------------------------------ */
 
     kParamIndexFrameRate = C2_PARAM_INDEX_VIDEO_PARAM_START,
@@ -270,6 +274,9 @@
 
     // encoding quality requirements
     kParamIndexEncodingQualityLevel, // encoders, enum
+
+    // encoding statistics, average block qp of a frame
+    kParamIndexAverageBlockQuantization, // int32
 };
 
 }
@@ -680,6 +687,9 @@
     LEVEL_DV_MAIN_UHD_30,                       ///< Dolby Vision main tier uhd30
     LEVEL_DV_MAIN_UHD_48,                       ///< Dolby Vision main tier uhd48
     LEVEL_DV_MAIN_UHD_60,                       ///< Dolby Vision main tier uhd60
+    LEVEL_DV_MAIN_UHD_120,                      ///< Dolby Vision main tier uhd120
+    LEVEL_DV_MAIN_8K_30,                        ///< Dolby Vision main tier 8k30
+    LEVEL_DV_MAIN_8K_60,                        ///< Dolby Vision main tier 8k60
 
     LEVEL_DV_HIGH_HD_24 = _C2_PL_DV_BASE + 0x100,  ///< Dolby Vision high tier hd24
     LEVEL_DV_HIGH_HD_30,                        ///< Dolby Vision high tier hd30
@@ -690,6 +700,9 @@
     LEVEL_DV_HIGH_UHD_30,                       ///< Dolby Vision high tier uhd30
     LEVEL_DV_HIGH_UHD_48,                       ///< Dolby Vision high tier uhd48
     LEVEL_DV_HIGH_UHD_60,                       ///< Dolby Vision high tier uhd60
+    LEVEL_DV_HIGH_UHD_120,                      ///< Dolby Vision high tier uhd120
+    LEVEL_DV_HIGH_8K_30,                        ///< Dolby Vision high tier 8k30
+    LEVEL_DV_HIGH_8K_60,                        ///< Dolby Vision high tier 8k60
 
     // AV1 levels
     LEVEL_AV1_2    = _C2_PL_AV1_BASE ,          ///< AV1 Level 2
@@ -1602,16 +1615,54 @@
     C2FIELD(maxFall, "max-fall")
 };
 typedef C2StreamParam<C2Info, C2HdrStaticMetadataStruct, kParamIndexHdrStaticMetadata>
-        C2StreamHdrStaticInfo;
+        C2StreamHdrStaticMetadataInfo;
+typedef C2StreamParam<C2Info, C2HdrStaticMetadataStruct, kParamIndexHdrStaticMetadata>
+        C2StreamHdrStaticInfo;  // deprecated
 constexpr char C2_PARAMKEY_HDR_STATIC_INFO[] = "raw.hdr-static-info";
 
 /**
  * HDR10+ Metadata Info.
+ *
+ * Deprecated. Use C2StreamHdrDynamicMetadataInfo with
+ * HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40
  */
 typedef C2StreamParam<C2Info, C2BlobValue, kParamIndexHdr10PlusMetadata>
-        C2StreamHdr10PlusInfo;
-constexpr char C2_PARAMKEY_INPUT_HDR10_PLUS_INFO[] = "input.hdr10-plus-info";
-constexpr char C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO[] = "output.hdr10-plus-info";
+        C2StreamHdr10PlusInfo;  // deprecated
+constexpr char C2_PARAMKEY_INPUT_HDR10_PLUS_INFO[] = "input.hdr10-plus-info";  // deprecated
+constexpr char C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO[] = "output.hdr10-plus-info";  // deprecated
+
+/**
+ * HDR dynamic metadata types
+ */
+C2ENUM(C2Config::hdr_dynamic_metadata_type_t, uint32_t,
+    HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10,  ///< SMPTE ST 2094-10
+    HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40,  ///< SMPTE ST 2094-40
+)
+
+struct C2HdrDynamicMetadataStruct {
+    inline C2HdrDynamicMetadataStruct() { memset(this, 0, sizeof(*this)); }
+
+    inline C2HdrDynamicMetadataStruct(
+            size_t flexCount, C2Config::hdr_dynamic_metadata_type_t type)
+        : type_(type) {
+        memset(data, 0, flexCount);
+    }
+
+    C2Config::hdr_dynamic_metadata_type_t type_;
+    uint8_t data[];
+
+    DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(HdrDynamicMetadata, data)
+    C2FIELD(type_, "type")
+    C2FIELD(data, "data")
+};
+
+/**
+ * Dynamic HDR Metadata Info.
+ */
+typedef C2StreamParam<C2Info, C2HdrDynamicMetadataStruct, kParamIndexHdrDynamicMetadata>
+        C2StreamHdrDynamicMetadataInfo;
+constexpr char C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO[] = "input.hdr-dynamic-info";
+constexpr char C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO[] = "output.hdr-dynamic-info";
 
 /* ------------------------------------ block-based coding ----------------------------------- */
 
@@ -1673,7 +1724,7 @@
     SYNC_FRAME = (1 << 0),  ///< sync frame, e.g. IDR
     I_FRAME    = (1 << 1),  ///< intra frame that is completely encoded
     P_FRAME    = (1 << 2),  ///< inter predicted frame from previous frames
-    B_FRAME    = (1 << 3),  ///< backward predicted (out-of-order) frame
+    B_FRAME    = (1 << 3),  ///< bidirectionally predicted (out-of-order) frame
 )
 
 /**
@@ -2411,6 +2462,17 @@
     S_HANDHELD = 1              // corresponds to VMAF=70
 );
 
+/**
+ * Video Encoding Statistics Export
+ */
+
+/**
+ * Average block QP exported from video encoder.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<int32_t>, kParamIndexAverageBlockQuantization>
+        C2AndroidStreamAverageBlockQuantizationInfo;
+constexpr char C2_PARAMKEY_AVERAGE_QP[] = "coded.average-qp";
+
 /// @}
 
 #endif  // C2CONFIG_H_
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index bd1fac6..147a52e 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -28,47 +28,22 @@
 cc_defaults {
     name: "C2Fuzzer-defaults",
 
+    defaults: [ "libcodec2-static-defaults" ],
+
     srcs: [
         "C2Fuzzer.cpp",
     ],
 
-    static_libs: [
-        "liblog",
-        "libion",
-        "libfmq",
-        "libbase",
-        "libutils",
-        "libcutils",
-        "libcodec2",
-        "libhidlbase",
-        "libdmabufheap",
-        "libcodec2_vndk",
-        "libnativewindow",
-        "libcodec2_soft_common",
-        "libsfplugin_ccodec_utils",
-        "libstagefright_foundation",
-        "libstagefright_bufferpool@2.0.1",
-        "android.hardware.graphics.mapper@2.0",
-        "android.hardware.graphics.mapper@3.0",
-        "android.hardware.media.bufferpool@2.0",
-        "android.hardware.graphics.allocator@2.0",
-        "android.hardware.graphics.allocator@3.0",
-        "android.hardware.graphics.bufferqueue@2.0",
-    ],
-
-    shared_libs: [
-        "libui",
-        "libdl",
-        "libbinder",
-        "libhardware",
-        "libvndksupport",
-        "libprocessgroup",
-    ],
-
     cflags: [
         "-Wall",
         "-Werror",
     ],
+
+    fuzz_config: {
+        cc: [
+            "wonsik@google.com",
+        ],
+    },
 }
 
 cc_fuzz {
diff --git a/media/codec2/fuzzer/C2Fuzzer.cpp b/media/codec2/fuzzer/C2Fuzzer.cpp
index 51e1013..e469d8b 100644
--- a/media/codec2/fuzzer/C2Fuzzer.cpp
+++ b/media/codec2/fuzzer/C2Fuzzer.cpp
@@ -194,12 +194,12 @@
   }
 
   std::vector<C2Param*> configParams;
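+  // Declared at this scope so the raw pointers pushed into configParams remain valid below.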
+  C2StreamPictureSizeInfo::input inputSize(0u, kWidthOfVideo, kHeightOfVideo);
+  C2StreamSampleRateInfo::output sampleRateInfo(0u, kSamplingRateOfAudio);
+  C2StreamChannelCountInfo::output channelCountInfo(0u, kChannelsOfAudio);
   if (domain.value == DOMAIN_VIDEO) {
-    C2StreamPictureSizeInfo::input inputSize(0u, kWidthOfVideo, kHeightOfVideo);
     configParams.push_back(&inputSize);
   } else if (domain.value == DOMAIN_AUDIO) {
-    C2StreamSampleRateInfo::output sampleRateInfo(0u, kSamplingRateOfAudio);
-    C2StreamChannelCountInfo::output channelCountInfo(0u, kChannelsOfAudio);
     configParams.push_back(&sampleRateInfo);
     configParams.push_back(&channelCountInfo);
   }
@@ -239,17 +239,17 @@
 }
 
 void Codec2Fuzzer::decodeFrames(const uint8_t* data, size_t size) {
-  mBufferSource = new BufferSource(data, size);
-  if (!mBufferSource) {
+  std::unique_ptr<BufferSource> bufferSource = std::make_unique<BufferSource>(data, size);
+  if (!bufferSource) {
     return;
   }
-  mBufferSource->parse();
+  bufferSource->parse();
   c2_status_t status = C2_OK;
   size_t numFrames = 0;
-  while (!mBufferSource->isEos()) {
+  while (!bufferSource->isEos()) {
     uint8_t* frame = nullptr;
     size_t frameSize = 0;
-    FrameData frameData = mBufferSource->getFrame();
+    FrameData frameData = bufferSource->getFrame();
     frame = std::get<0>(frameData);
     frameSize = std::get<1>(frameData);
 
@@ -298,7 +298,6 @@
   mConditionalVariable.wait_for(waitForDecodeComplete, kC2FuzzerTimeOut, [this] { return mEos; });
   std::list<std::unique_ptr<C2Work>> c2flushedWorks;
   mComponent->flush_sm(C2Component::FLUSH_COMPONENT, &c2flushedWorks);
-  delete mBufferSource;
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
diff --git a/media/codec2/fuzzer/C2Fuzzer.h b/media/codec2/fuzzer/C2Fuzzer.h
index d5ac81a..da76885 100644
--- a/media/codec2/fuzzer/C2Fuzzer.h
+++ b/media/codec2/fuzzer/C2Fuzzer.h
@@ -104,7 +104,6 @@
     static constexpr size_t kMarkerSuffixSize = 3;
   };
 
-  BufferSource* mBufferSource;
   bool mEos = false;
   C2BlockPool::local_id_t mBlockPoolId;
 
diff --git a/media/codec2/hidl/1.0/utils/Component.cpp b/media/codec2/hidl/1.0/utils/Component.cpp
index 082c5e3..df30dba 100644
--- a/media/codec2/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hidl/1.0/utils/Component.cpp
@@ -482,6 +482,37 @@
     if (res != C2_OK) {
         mInit = res;
     }
+
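+    // Release the component if the client that registered the listener dies.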
+    struct ListenerDeathRecipient : public HwDeathRecipient {
+        ListenerDeathRecipient(const wp<Component>& comp)
+            : mComponent{comp} {
+        }
+
+        virtual void serviceDied(
+                uint64_t /* cookie */,
+                const wp<::android::hidl::base::V1_0::IBase>& /* who */
+                ) override {
+            auto strongComponent = mComponent.promote();
+            if (strongComponent) {
+                LOG(INFO) << "Client died ! release the component !!";
+                strongComponent->release();
+            } else {
+                LOG(ERROR) << "Client died ! no component to release !!";
+            }
+        }
+
+        wp<Component> mComponent;
+    };
+
+    mDeathRecipient = new ListenerDeathRecipient(self);
+    Return<bool> transStatus = mListener->linkToDeath(
+            mDeathRecipient, 0);
+    if (!transStatus.isOk()) {
+        LOG(ERROR) << "Listener linkToDeath() transaction failed.";
+    }
+    if (!static_cast<bool>(transStatus)) {
+        LOG(DEBUG) << "Listener linkToDeath() call failed.";
+    }
 }
 
 Component::~Component() {
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
index 86dccd0..e343655 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
+++ b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
@@ -132,6 +132,9 @@
     friend struct ComponentStore;
 
     struct Listener;
+
+    using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
+    sp<HwDeathRecipient> mDeathRecipient;
 };
 
 }  // namespace utils
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
deleted file mode 120000
index 136279c..0000000
--- a/media/codec2/hidl/1.0/vts/.clang-format
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/OWNERS b/media/codec2/hidl/1.0/vts/OWNERS
index dbe89cf..32b11b8 100644
--- a/media/codec2/hidl/1.0/vts/OWNERS
+++ b/media/codec2/hidl/1.0/vts/OWNERS
@@ -1,8 +1,5 @@
+# Bug component: 25690
 # Media team
 lajos@google.com
-pawin@google.com
 taklee@google.com
 wonsik@google.com
-
-# VTS team
-dshi@google.com
diff --git a/media/codec2/hidl/1.1/utils/Component.cpp b/media/codec2/hidl/1.1/utils/Component.cpp
index 1d7d3d8..2dd922f 100644
--- a/media/codec2/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hidl/1.1/utils/Component.cpp
@@ -489,6 +489,37 @@
     if (res != C2_OK) {
         mInit = res;
     }
+
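+    // Release the component if the client that registered the listener dies.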
+    struct ListenerDeathRecipient : public HwDeathRecipient {
+        ListenerDeathRecipient(const wp<Component>& comp)
+            : component{comp} {
+        }
+
+        virtual void serviceDied(
+                uint64_t /* cookie */,
+                const wp<::android::hidl::base::V1_0::IBase>& /* who */
+                ) override {
+            auto strongComponent = component.promote();
+            if (strongComponent) {
+                LOG(INFO) << "Client died ! release the component !!";
+                strongComponent->release();
+            } else {
+                LOG(ERROR) << "Client died ! no component to release !!";
+            }
+        }
+
+        wp<Component> component;
+    };
+
+    mDeathRecipient = new ListenerDeathRecipient(self);
+    Return<bool> transStatus = mListener->linkToDeath(
+            mDeathRecipient, 0);
+    if (!transStatus.isOk()) {
+        LOG(ERROR) << "Listener linkToDeath() transaction failed.";
+    }
+    if (!static_cast<bool>(transStatus)) {
+        LOG(DEBUG) << "Listener linkToDeath() call failed.";
+    }
 }
 
 Component::~Component() {
diff --git a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h b/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
index 16c81d4..1c8c20c 100644
--- a/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
+++ b/media/codec2/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
@@ -137,6 +137,9 @@
     friend struct ComponentStore;
 
     struct Listener;
+
+    using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
+    sp<HwDeathRecipient> mDeathRecipient;
 };
 
 } // namespace utils
diff --git a/media/codec2/hidl/1.2/utils/Component.cpp b/media/codec2/hidl/1.2/utils/Component.cpp
index 8924e6d..7994d32 100644
--- a/media/codec2/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hidl/1.2/utils/Component.cpp
@@ -520,6 +520,37 @@
     if (res != C2_OK) {
         mInit = res;
     }
+
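+    // Release the component if the client that registered the listener dies.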
+    struct ListenerDeathRecipient : public HwDeathRecipient {
+        ListenerDeathRecipient(const wp<Component>& comp)
+            : component{comp} {
+        }
+
+        virtual void serviceDied(
+                uint64_t /* cookie */,
+                const wp<::android::hidl::base::V1_0::IBase>& /* who */
+                ) override {
+            auto strongComponent = component.promote();
+            if (strongComponent) {
+                LOG(INFO) << "Client died ! release the component !!";
+                strongComponent->release();
+            } else {
+                LOG(ERROR) << "Client died ! no component to release !!";
+            }
+        }
+
+        wp<Component> component;
+    };
+
+    mDeathRecipient = new ListenerDeathRecipient(self);
+    Return<bool> transStatus = mListener->linkToDeath(
+            mDeathRecipient, 0);
+    if (!transStatus.isOk()) {
+        LOG(ERROR) << "Listener linkToDeath() transaction failed.";
+    }
+    if (!static_cast<bool>(transStatus)) {
+        LOG(DEBUG) << "Listener linkToDeath() call failed.";
+    }
 }
 
 Component::~Component() {
diff --git a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
index 7937664..d0972ee 100644
--- a/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
+++ b/media/codec2/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -142,6 +142,10 @@
     friend struct ComponentStore;
 
     struct Listener;
+
+    using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
+    sp<HwDeathRecipient> mDeathRecipient;
+
 };
 
 } // namespace utils
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hidl/plugin/FilterWrapper.cpp
index 70c63f2..d5124fd 100644
--- a/media/codec2/hidl/plugin/FilterWrapper.cpp
+++ b/media/codec2/hidl/plugin/FilterWrapper.cpp
@@ -49,6 +49,11 @@
             std::weak_ptr<FilterWrapper> filterWrapper)
         : mIntf(intf), mFilterWrapper(filterWrapper) {
         takeFilters(std::move(filters));
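+        // Record the parameter types controlled by the filters so that queries for them are
+        // not forwarded to the decoder interface.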
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            mControlParamTypes.insert(
+                    mFilters[i].desc.controlParams.begin(),
+                    mFilters[i].desc.controlParams.end());
+        }
     }
 
     ~WrappedDecoderInterface() override = default;
@@ -187,7 +192,12 @@
         }
 
         std::vector<C2Param *> stackParamsForIntf;
-        std::copy_n(stackParamsList.begin(), stackParamsList.size(), stackParamsForIntf.begin());
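+        // Forward only the stack params that are not controlled by a filter.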
+        for (C2Param *param : stackParamsList) {
+            if (mControlParamTypes.count(param->type()) != 0) {
+                continue;
+            }
+            stackParamsForIntf.push_back(param);
+        }
 
         // Gather heap params that did not get queried from the filter interfaces above.
         // These need to be queried from the decoder interface.
@@ -197,6 +207,9 @@
             if (mTypeToIndexForQuery.find(type) != mTypeToIndexForQuery.end()) {
                 continue;
             }
+            if (mControlParamTypes.count(type) != 0) {
+                continue;
+            }
             heapParamIndicesForIntf.push_back(heapParamIndices[j]);
         }
 
@@ -251,11 +264,14 @@
             std::vector<C2Param *> paramsForFilter;
             for (C2Param* param : params) {
                 auto it = mTypeToIndexForConfig.find(param->type().type());
-                if (it != mTypeToIndexForConfig.end() && it->second != i) {
+                if (it == mTypeToIndexForConfig.end() || it->second != i) {
                     continue;
                 }
                 paramsForFilter.push_back(param);
             }
+            if (paramsForFilter.empty()) {
+                continue;
+            }
             c2_status_t err = filter->config_vb(paramsForFilter, mayBlock, &filterFailures);
             if (err != C2_OK) {
                 LOG(err == C2_BAD_INDEX ? VERBOSE : WARNING)
@@ -356,6 +372,7 @@
     std::weak_ptr<FilterWrapper> mFilterWrapper;
     std::map<uint32_t, size_t> mTypeToIndexForQuery;
     std::map<uint32_t, size_t> mTypeToIndexForConfig;
+    std::set<C2Param::Type> mControlParamTypes;
 
     c2_status_t transferParams_l(
             const std::shared_ptr<C2ComponentInterface> &curr,
@@ -430,6 +447,10 @@
             LOG(DEBUG) << "WrappedDecoderInterface: FilterWrapper not found";
             return C2_OK;
         }
+        if (!filterWrapper->isFilteringEnabled(next)) {
+            LOG(VERBOSE) << "WrappedDecoderInterface: filtering not enabled";
+            return C2_OK;
+        }
         std::vector<std::unique_ptr<C2Param>> params;
         c2_status_t err = filterWrapper->queryParamsForPreviousComponent(next, &params);
         if (err != C2_OK) {
@@ -594,6 +615,8 @@
             }
         }
         mRunningFilters.clear();
+        std::vector<FilterWrapper::Component> filters(mFilters);
+        mIntf->takeFilters(std::move(filters));
         return result;
     }
 
diff --git a/media/codec2/hidl/services/Android.bp b/media/codec2/hidl/services/Android.bp
index bb9f51f..b36e80a 100644
--- a/media/codec2/hidl/services/Android.bp
+++ b/media/codec2/hidl/services/Android.bp
@@ -52,7 +52,7 @@
 
     // minijail is used to protect against unexpected system calls.
     shared_libs: [
-        "libavservices_minijail_vendor",
+        "libavservices_minijail",
         "libbinder",
     ],
     required: ["android.hardware.media.c2@1.2-default-seccomp_policy"],
diff --git a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
index 03f6e3d..12da593 100644
--- a/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
+++ b/media/codec2/hidl/services/android.hardware.media.c2@1.2-default-service.rc
@@ -3,5 +3,5 @@
     user mediacodec
     group camera mediadrm drmrpc
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
 
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 2bc748f..39fa4fc 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -42,12 +42,14 @@
         "android.hardware.drm@1.0",
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
+        "android.hardware.graphics.mapper@4.0",
         "libbase",
         "libbinder",
         "libcodec2",
         "libcodec2_client",
         "libcodec2_vndk",
         "libcutils",
+        "libgralloctypes",
         "libgui",
         "libhidlallocatorutils",
         "libhidlbase",
@@ -59,6 +61,7 @@
         "libstagefright_codecbase",
         "libstagefright_foundation",
         "libstagefright_omx",
+        "libstagefright_surface_utils",
         "libstagefright_xmlparser",
         "libui",
         "libutils",
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index c049187..ed7d69c 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -42,6 +42,7 @@
 
 #include "utils/Codec2Mapper.h"
 #include "C2OMXNode.h"
+#include "Codec2Buffer.h"
 
 namespace android {
 
@@ -466,6 +467,18 @@
                 new Buffer2D(block->share(
                         C2Rect(block->width(), block->height()), ::C2Fence())));
         work->input.buffers.push_back(c2Buffer);
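+        // Attach HDR metadata carried by the gralloc handle to the input buffer.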
+        std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
+        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
+        GetHdrMetadataFromGralloc4Handle(
+                block->handle(),
+                &staticInfo,
+                &dynamicInfo);
+        if (staticInfo && *staticInfo) {
+            c2Buffer->setInfo(staticInfo);
+        }
+        if (dynamicInfo && *dynamicInfo) {
+            c2Buffer->setInfo(dynamicInfo);
+        }
     }
     work->worklets.clear();
     work->worklets.emplace_back(new C2Worklet);
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 82460c9..c0a6816 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -212,9 +212,8 @@
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                 &usage, sizeof(usage));
 
-        mSource->configure(
-                mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace));
-        return OK;
+        return GetStatus(mSource->configure(
+                mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace)));
     }
 
     void disconnect() override {
@@ -872,6 +871,11 @@
                         }
                         config->mTunneled = true;
                     }
+
+                    int32_t pushBlankBuffersOnStop = 0;
+                    if (msg->findInt32(KEY_PUSH_BLANK_BUFFERS_ON_STOP, &pushBlankBuffersOnStop)) {
+                        config->mPushBlankBuffersOnStop = pushBlankBuffersOnStop == 1;
+                    }
                 }
             }
             setSurface(surface);
@@ -1018,29 +1022,31 @@
             } else {
                 pixelFormatInfo = nullptr;
             }
-            std::optional<uint32_t> flexPixelFormat{};
-            std::optional<uint32_t> flexPlanarPixelFormat{};
-            std::optional<uint32_t> flexSemiPlanarPixelFormat{};
+            // bit depth -> format
+            std::map<uint32_t, uint32_t> flexPixelFormat;
+            std::map<uint32_t, uint32_t> flexPlanarPixelFormat;
+            std::map<uint32_t, uint32_t> flexSemiPlanarPixelFormat;
             if (pixelFormatInfo && *pixelFormatInfo) {
                 for (size_t i = 0; i < pixelFormatInfo->flexCount(); ++i) {
                     const C2FlexiblePixelFormatDescriptorStruct &desc =
                         pixelFormatInfo->m.values[i];
-                    if (desc.bitDepth != 8
-                            || desc.subsampling != C2Color::YUV_420
+                    if (desc.subsampling != C2Color::YUV_420
                             // TODO(b/180076105): some devices report wrong layout
                             // || desc.layout == C2Color::INTERLEAVED_PACKED
                             // || desc.layout == C2Color::INTERLEAVED_ALIGNED
                             || desc.layout == C2Color::UNKNOWN_LAYOUT) {
                         continue;
                     }
-                    if (!flexPixelFormat) {
-                        flexPixelFormat = desc.pixelFormat;
+                    if (flexPixelFormat.count(desc.bitDepth) == 0) {
+                        flexPixelFormat.emplace(desc.bitDepth, desc.pixelFormat);
                     }
-                    if (desc.layout == C2Color::PLANAR_PACKED && !flexPlanarPixelFormat) {
-                        flexPlanarPixelFormat = desc.pixelFormat;
+                    if (desc.layout == C2Color::PLANAR_PACKED
+                            && flexPlanarPixelFormat.count(desc.bitDepth) == 0) {
+                        flexPlanarPixelFormat.emplace(desc.bitDepth, desc.pixelFormat);
                     }
-                    if (desc.layout == C2Color::SEMIPLANAR_PACKED && !flexSemiPlanarPixelFormat) {
-                        flexSemiPlanarPixelFormat = desc.pixelFormat;
+                    if (desc.layout == C2Color::SEMIPLANAR_PACKED
+                            && flexSemiPlanarPixelFormat.count(desc.bitDepth) == 0) {
+                        flexSemiPlanarPixelFormat.emplace(desc.bitDepth, desc.pixelFormat);
                     }
                 }
             }
@@ -1050,7 +1056,7 @@
                 if (!(config->mDomain & Config::IS_ENCODER)) {
                     if (surface == nullptr) {
                         const char *prefix = "";
-                        if (flexSemiPlanarPixelFormat) {
+                        if (flexSemiPlanarPixelFormat.count(8) != 0) {
                             format = COLOR_FormatYUV420SemiPlanar;
                             prefix = "semi-";
                         } else {
@@ -1067,17 +1073,34 @@
                 if ((config->mDomain & Config::IS_ENCODER) || !surface) {
                     switch (format) {
                         case COLOR_FormatYUV420Flexible:
-                            format = flexPixelFormat.value_or(COLOR_FormatYUV420Planar);
+                            format = COLOR_FormatYUV420Planar;
+                            if (flexPixelFormat.count(8) != 0) {
+                                format = flexPixelFormat[8];
+                            }
                             break;
                         case COLOR_FormatYUV420Planar:
                         case COLOR_FormatYUV420PackedPlanar:
-                            format = flexPlanarPixelFormat.value_or(
-                                    flexPixelFormat.value_or(format));
+                            if (flexPlanarPixelFormat.count(8) != 0) {
+                                format = flexPlanarPixelFormat[8];
+                            } else if (flexPixelFormat.count(8) != 0) {
+                                format = flexPixelFormat[8];
+                            }
                             break;
                         case COLOR_FormatYUV420SemiPlanar:
                         case COLOR_FormatYUV420PackedSemiPlanar:
-                            format = flexSemiPlanarPixelFormat.value_or(
-                                    flexPixelFormat.value_or(format));
+                            if (flexSemiPlanarPixelFormat.count(8) != 0) {
+                                format = flexSemiPlanarPixelFormat[8];
+                            } else if (flexPixelFormat.count(8) != 0) {
+                                format = flexPixelFormat[8];
+                            }
+                            break;
+                        case COLOR_FormatYUVP010:
+                            format = COLOR_FormatYUVP010;
+                            if (flexSemiPlanarPixelFormat.count(10) != 0) {
+                                format = flexSemiPlanarPixelFormat[10];
+                            } else if (flexPixelFormat.count(10) != 0) {
+                                format = flexPixelFormat[10];
+                            }
                             break;
                         default:
                             // No-op
@@ -1213,11 +1236,25 @@
         std::initializer_list<C2Param::Index> indices {
             colorAspectsRequestIndex.withStream(0u),
         };
-        c2_status_t c2err = comp->query(
-                { &usage, &maxInputSize, &prepend },
-                indices,
-                C2_DONT_BLOCK,
-                &params);
+        int32_t colorTransferRequest = 0;
+        if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)
+                && !sdkParams->findInt32("color-transfer-request", &colorTransferRequest)) {
+            colorTransferRequest = 0;
+        }
+        c2_status_t c2err = C2_OK;
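+        // Query the color-aspects-request index only when a color transfer request is present.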
+        if (colorTransferRequest != 0) {
+            c2err = comp->query(
+                    { &usage, &maxInputSize, &prepend },
+                    indices,
+                    C2_DONT_BLOCK,
+                    &params);
+        } else {
+            c2err = comp->query(
+                    { &usage, &maxInputSize, &prepend },
+                    {},
+                    C2_DONT_BLOCK,
+                    &params);
+        }
         if (c2err != C2_OK && c2err != C2_BAD_INDEX) {
             ALOGE("Failed to query component interface: %d", c2err);
             return UNKNOWN_ERROR;
@@ -1332,8 +1369,8 @@
             }
         }
 
-        // set channel-mask
         if (config->mDomain & Config::IS_AUDIO) {
+            // set channel-mask
             int32_t mask;
             if (msg->findInt32(KEY_CHANNEL_MASK, &mask)) {
                 if (config->mDomain & Config::IS_ENCODER) {
@@ -1342,6 +1379,15 @@
                     config->mOutputFormat->setInt32(KEY_CHANNEL_MASK, mask);
                 }
             }
+
+            // set PCM encoding
+            int32_t pcmEncoding = kAudioEncodingPcm16bit;
+            msg->findInt32(KEY_PCM_ENCODING, &pcmEncoding);
+            if (encoder) {
+                config->mInputFormat->setInt32("android._config-pcm-encoding", pcmEncoding);
+            } else {
+                config->mOutputFormat->setInt32("android._config-pcm-encoding", pcmEncoding);
+            }
         }
 
         std::unique_ptr<C2Param> colorTransferRequestParam;
@@ -1351,11 +1397,6 @@
                 colorTransferRequestParam = std::move(param);
             }
         }
-        int32_t colorTransferRequest = 0;
-        if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)
-                && !sdkParams->findInt32("color-transfer-request", &colorTransferRequest)) {
-            colorTransferRequest = 0;
-        }
 
         if (colorTransferRequest != 0) {
             if (colorTransferRequestParam && *colorTransferRequestParam) {
@@ -1421,6 +1462,31 @@
             }
         }
 
+        if (config->mTunneled) {
+            config->mOutputFormat->setInt32("android._tunneled", 1);
+        }
+
+        // Convert an encoding statistics level to the corresponding encoding
+        // statistics kinds.
+        int32_t encodingStatisticsLevel = VIDEO_ENCODING_STATISTICS_LEVEL_NONE;
+        if ((config->mDomain & Config::IS_ENCODER)
+            && (config->mDomain & Config::IS_VIDEO)
+            && msg->findInt32(KEY_VIDEO_ENCODING_STATISTICS_LEVEL, &encodingStatisticsLevel)) {
+            // Higher levels include all the encoding statistics that belong to lower levels.
+            switch (encodingStatisticsLevel) {
+                // case VIDEO_ENCODING_STATISTICS_LEVEL_2:
+                //     reserved for a future level 2 with more encoding statistics kinds
+                // Extended encoding statistics for level 2 should be added here
+                case VIDEO_ENCODING_STATISTICS_LEVEL_1:
+                    config->subscribeToConfigUpdate(comp,
+                        {kParamIndexAverageBlockQuantization, kParamIndexPictureType});
+                    break;
+                case VIDEO_ENCODING_STATISTICS_LEVEL_NONE:
+                    break;
+            }
+        }
+        ALOGD("encoding statistics level = %d", encodingStatisticsLevel);
+
         ALOGD("setup formats input: %s",
                 config->mInputFormat->debugString().c_str());
         ALOGD("setup formats output: %s",
@@ -1742,9 +1808,16 @@
     if (tryAndReportOnError(setRunning) != OK) {
         return;
     }
+
+    err2 = mChannel->requestInitialInputBuffers();
+
+    if (err2 != OK) {
+        ALOGE("Initial request for Input Buffers failed");
+        mCallback->onError(err2,ACTION_CODE_FATAL);
+        return;
+    }
     mCallback->onStartCompleted();
 
-    (void)mChannel->requestInitialInputBuffers();
 }
 
 void CCodec::initiateShutdown(bool keepComponentAllocated) {
@@ -1770,7 +1843,13 @@
         }
         state->set(STOPPING);
     }
-
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        if (config->mPushBlankBuffersOnStop) {
+            mChannel->pushBlankBufferToOutputSurface();
+        }
+    }
     mChannel->reset();
     (new AMessage(kWhatStop, this))->post();
 }
@@ -1858,6 +1937,13 @@
             config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
         }
     }
+    {
+        Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+        const std::unique_ptr<Config> &config = *configLocked;
+        if (config->mPushBlankBuffersOnStop) {
+            mChannel->pushBlankBufferToOutputSurface();
+        }
+    }
 
     mChannel->reset();
     // thiz holds strong ref to this while the thread is running.
@@ -1896,9 +1982,11 @@
     {
         Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
         const std::unique_ptr<Config> &config = *configLocked;
+        sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+        status_t err = OK;
+
         if (config->mTunneled && config->mSidebandHandle != nullptr) {
-            sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
-            status_t err = native_window_set_sideband_stream(
+            err = native_window_set_sideband_stream(
                     nativeWindow.get(),
                     const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
             if (err != OK) {
@@ -1906,6 +1994,15 @@
                         nativeWindow.get(), config->mSidebandHandle->handle(), err);
                 return err;
             }
+        } else {
+            // Explicitly reset the sideband handle of the window for
+            // non-tunneled video in case the window was previously used
+            // for tunneled video playback.
+            err = native_window_set_sideband_stream(nativeWindow.get(), nullptr);
+            if (err != OK) {
+                ALOGE("native_window_set_sideband_stream(nullptr) failed! (err %d).", err);
+                return err;
+            }
         }
     }
     return mChannel->setSurface(surface);
@@ -2546,7 +2643,10 @@
         std::vector<std::unique_ptr<C2Param>> params;
         err = intf->query(
                 {&mApiFeatures},
-                {C2PortAllocatorsTuning::input::PARAM_TYPE},
+                {
+                    C2StreamBufferTypeSetting::input::PARAM_TYPE,
+                    C2PortAllocatorsTuning::input::PARAM_TYPE
+                },
                 C2_MAY_BLOCK,
                 &params);
         if (err != C2_OK && err != C2_BAD_INDEX) {
@@ -2559,7 +2659,10 @@
             if (!param) {
                 continue;
             }
-            if (param->type() == C2PortAllocatorsTuning::input::PARAM_TYPE) {
+            if (param->type() == C2StreamBufferTypeSetting::input::PARAM_TYPE) {
+                mInputStreamFormat.reset(
+                        C2StreamBufferTypeSetting::input::From(param));
+            } else if (param->type() == C2PortAllocatorsTuning::input::PARAM_TYPE) {
                 mInputAllocators.reset(
                         C2PortAllocatorsTuning::input::From(param));
             }
@@ -2579,6 +2682,16 @@
         return mApiFeatures;
     }
 
+    const C2StreamBufferTypeSetting::input &getInputStreamFormat() const {
+        static std::unique_ptr<C2StreamBufferTypeSetting::input> sInvalidated = []{
+            std::unique_ptr<C2StreamBufferTypeSetting::input> param;
+            param.reset(new C2StreamBufferTypeSetting::input(0u, C2BufferData::INVALID));
+            param->invalidate();
+            return param;
+        }();
+        return mInputStreamFormat ? *mInputStreamFormat : *sInvalidated;
+    }
+
     const C2PortAllocatorsTuning::input &getInputAllocators() const {
         static std::unique_ptr<C2PortAllocatorsTuning::input> sInvalidated = []{
             std::unique_ptr<C2PortAllocatorsTuning::input> param =
@@ -2594,6 +2707,7 @@
 
     std::vector<C2FieldSupportedValuesQuery> mFields;
     C2ApiFeaturesSetting mApiFeatures;
+    std::unique_ptr<C2StreamBufferTypeSetting::input> mInputStreamFormat;
     std::unique_ptr<C2PortAllocatorsTuning::input> mInputAllocators;
 };
 
@@ -2635,6 +2749,24 @@
         if (intfCache.initCheck() != OK) {
             continue;
         }
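+        // If the component declares its input buffer type, fall back to the default allocator
+        // when the requested allocator type does not match it.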
+        const C2StreamBufferTypeSetting::input &streamFormat = intfCache.getInputStreamFormat();
+        if (streamFormat) {
+            C2Allocator::type_t allocatorType = C2Allocator::LINEAR;
+            if (streamFormat.value == C2BufferData::GRAPHIC
+                    || streamFormat.value == C2BufferData::GRAPHIC_CHUNKS) {
+                allocatorType = C2Allocator::GRAPHIC;
+            }
+
+            if (type != allocatorType) {
+                // requested type is not supported by the input allocators
+                ids->clear();
+                ids->insert(defaultAllocatorId);
+                ALOGV("name(%s) does not support type(0x%x) as an input allocator;"
+                        " using default allocator id(%d)", name.c_str(), type, defaultAllocatorId);
+                break;
+            }
+        }
+
         const C2PortAllocatorsTuning::input &allocators = intfCache.getInputAllocators();
         if (firstIteration) {
             firstIteration = false;
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 6f7b7f7..62a1d02 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -15,8 +15,11 @@
  */
 
 //#define LOG_NDEBUG 0
+#include <utils/Errors.h>
 #define LOG_TAG "CCodecBufferChannel"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <algorithm>
 #include <atomic>
@@ -44,9 +47,9 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/SkipCutBuffer.h>
+#include <media/stagefright/SurfaceUtils.h>
 #include <media/MediaCodecBuffer.h>
 #include <mediadrm/ICrypto.h>
 #include <system/window.h>
@@ -254,7 +257,7 @@
                 bool released = input->buffers->releaseBuffer(buffer, nullptr, true);
                 ALOGV("[%s] queueInputBuffer: buffer copied; %sreleased",
                       mName, released ? "" : "not ");
-                buffer.clear();
+                buffer = copy;
             } else {
                 ALOGW("[%s] queueInputBuffer: failed to copy a buffer; this may cause input "
                       "buffer starvation on component.", mName);
@@ -282,6 +285,12 @@
             }
         }
     } else if (eos) {
+        Mutexed<Input>::Locked input(mInput);
+        if (input->frameReassembler) {
+            usesFrameReassembler = true;
+            // drain any pending items with eos
+            input->frameReassembler.process(buffer, &items);
+        }
         flags |= C2FrameData::FLAG_END_OF_STREAM;
     }
     if (usesFrameReassembler) {
@@ -321,6 +330,8 @@
     }
     c2_status_t err = C2_OK;
     if (!items.empty()) {
+        ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+                "CCodecBufferChannel::queue(%s@ts=%lld)", mName, (long long)timeUs).c_str());
         {
             Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
             PipelineWatcher::Clock::time_point now = PipelineWatcher::Clock::now();
@@ -341,10 +352,10 @@
     } else {
         Mutexed<Input>::Locked input(mInput);
         bool released = false;
-        if (buffer) {
-            released = input->buffers->releaseBuffer(buffer, nullptr, true);
-        } else if (copy) {
+        if (copy) {
             released = input->extraBuffers.releaseSlot(copy, nullptr, true);
+        } else if (buffer) {
+            released = input->buffers->releaseBuffer(buffer, nullptr, true);
         }
         ALOGV("[%s] queueInputBuffer: buffer%s %sreleased",
               mName, (buffer == nullptr) ? "(copy)" : "", released ? "" : "not ");
@@ -834,6 +845,35 @@
         hdr10PlusInfo.reset();
     }
 
+    // HDR dynamic info
+    std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> hdrDynamicInfo =
+        std::static_pointer_cast<const C2StreamHdrDynamicMetadataInfo::output>(
+                c2Buffer->getInfo(C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE));
+    // TODO: make this sticky & enable unset
+    if (hdrDynamicInfo && hdrDynamicInfo->flexCount() == 0) {
+        hdrDynamicInfo.reset();
+    }
+
+    if (hdr10PlusInfo) {
+        // C2StreamHdr10PlusInfo is deprecated; components should use
+        // C2StreamHdrDynamicMetadataInfo
+        // TODO: #metric
+        if (hdrDynamicInfo) {
+            // It is unexpected that C2StreamHdr10PlusInfo and
+            // C2StreamHdrDynamicMetadataInfo are both present.
+            // C2StreamHdrDynamicMetadataInfo takes priority.
+            // TODO: #metric
+        } else {
+            std::shared_ptr<C2StreamHdrDynamicMetadataInfo::output> info =
+                    C2StreamHdrDynamicMetadataInfo::output::AllocShared(
+                            hdr10PlusInfo->flexCount(),
+                            0u,
+                            C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+            memcpy(info->m.data, hdr10PlusInfo->m.value, hdr10PlusInfo->flexCount());
+            hdrDynamicInfo = info;
+        }
+    }
+
     std::vector<C2ConstGraphicBlock> blocks = c2Buffer->data().graphicBlocks();
     if (blocks.size() != 1u) {
         ALOGD("[%s] expected 1 graphic block, but got %zu", mName, blocks.size());
@@ -853,7 +893,7 @@
             videoScalingMode,
             transform,
             Fence::NO_FENCE, 0);
-    if (hdrStaticInfo || hdr10PlusInfo) {
+    if (hdrStaticInfo || hdrDynamicInfo) {
         HdrMetadata hdr;
         if (hdrStaticInfo) {
             // If mastering max and min luminance fields are 0, do not use them.
@@ -889,14 +929,22 @@
                 hdr.validTypes |= HdrMetadata::CTA861_3;
                 hdr.cta8613 = cta861_meta;
             }
+
+            // does not have valid info
+            if (!(hdr.validTypes & (HdrMetadata::SMPTE2086 | HdrMetadata::CTA861_3))) {
+                hdrStaticInfo.reset();
+            }
         }
-        if (hdr10PlusInfo) {
+        if (hdrDynamicInfo
+                && hdrDynamicInfo->m.type_ == C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
             hdr.validTypes |= HdrMetadata::HDR10PLUS;
             hdr.hdr10plus.assign(
-                    hdr10PlusInfo->m.value,
-                    hdr10PlusInfo->m.value + hdr10PlusInfo->flexCount());
+                    hdrDynamicInfo->m.data,
+                    hdrDynamicInfo->m.data + hdrDynamicInfo->flexCount());
         }
         qbi.setHdrMetadata(hdr);
+
+        SetHdrMetadataToGralloc4Handle(hdrStaticInfo, hdrDynamicInfo, block.handle());
     }
     // we don't have dirty regions
     qbi.setSurfaceDamage(Region::INVALID_REGION);
@@ -1382,6 +1430,12 @@
                 }
             }
         }
+
+        int32_t tunneled = 0;
+        if (!outputFormat->findInt32("android._tunneled", &tunneled)) {
+            tunneled = 0;
+        }
+        mTunneled = (tunneled != 0);
     }
 
     // Set up pipeline control. This has to be done after mInputBuffers and
@@ -1848,7 +1902,7 @@
 
     int32_t flags = 0;
     if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
-        flags |= MediaCodec::BUFFER_FLAG_EOS;
+        flags |= BUFFER_FLAG_END_OF_STREAM;
         ALOGV("[%s] onWorkDone: output EOS", mName);
     }
 
@@ -1865,6 +1919,8 @@
         // When using input surface we need to restore the original input timestamp.
         timestamp = work->input.ordinal.customOrdinal;
     }
+    ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+            "CCodecBufferChannel::onWorkDone(%s@ts=%lld)", mName, timestamp.peekll()).c_str());
     ALOGV("[%s] onWorkDone: input %lld, codec %lld => output %lld => %lld",
           mName,
           work->input.ordinal.customOrdinal.peekll(),
@@ -1886,7 +1942,7 @@
         sp<MediaCodecBuffer> outBuffer;
         if (output->buffers && output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
             outBuffer->meta()->setInt64("timeUs", timestamp.peek());
-            outBuffer->meta()->setInt32("flags", MediaCodec::BUFFER_FLAG_CODECCONFIG);
+            outBuffer->meta()->setInt32("flags", BUFFER_FLAG_CODEC_CONFIG);
             ALOGV("[%s] onWorkDone: csd index = %zu [%p]", mName, index, outBuffer.get());
 
             output.unlock();
@@ -1899,10 +1955,21 @@
         }
     }
 
+    bool drop = false;
+    if (worklet->output.flags & C2FrameData::FLAG_DROP_FRAME) {
+        ALOGV("[%s] onWorkDone: drop buffer but keep metadata", mName);
+        drop = true;
+    }
+
     if (notifyClient && !buffer && !flags) {
-        ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
-              mName, work->input.ordinal.frameIndex.peekull());
-        notifyClient = false;
+        if (mTunneled && drop && outputFormat) {
+            ALOGV("[%s] onWorkDone: Keep tunneled, drop frame with format change (%lld)",
+                  mName, work->input.ordinal.frameIndex.peekull());
+        } else {
+            ALOGV("[%s] onWorkDone: Not reporting output buffer (%lld)",
+                  mName, work->input.ordinal.frameIndex.peekull());
+            notifyClient = false;
+        }
     }
 
     if (buffer) {
@@ -1911,7 +1978,7 @@
             switch (info->coreIndex().coreIndex()) {
                 case C2StreamPictureTypeMaskInfo::CORE_INDEX:
                     if (((C2StreamPictureTypeMaskInfo *)info.get())->value & C2Config::SYNC_FRAME) {
-                        flags |= MediaCodec::BUFFER_FLAG_SYNCFRAME;
+                        flags |= BUFFER_FLAG_KEY_FRAME;
                     }
                     break;
                 default:
@@ -2031,12 +2098,13 @@
 }
 
 PipelineWatcher::Clock::duration CCodecBufferChannel::elapsed() {
-    // When client pushed EOS, we want all the work to be done quickly.
     // Otherwise, component may have stalled work due to input starvation up to
     // the sum of the delay in the pipeline.
+    // TODO(b/231253301): When the client has pushed EOS, the pipeline could
+    //                    have fewer frames.
     size_t n = 0;
-    if (!mInputMetEos) {
-        size_t outputDelay = mOutput.lock()->outputDelay;
+    size_t outputDelay = mOutput.lock()->outputDelay;
+    {
         Mutexed<Input>::Locked input(mInput);
         n = input->inputDelay + input->pipelineDelay + outputDelay;
     }
@@ -2115,4 +2183,13 @@
     }
 }
 
+status_t CCodecBufferChannel::pushBlankBufferToOutputSurface() {
+  Mutexed<OutputSurface>::Locked output(mOutputSurface);
+  sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(output->surface.get());
+  if (nativeWindow == nullptr) {
+      return INVALID_OPERATION;
+  }
+  return pushBlankBuffersToNativeWindow(nativeWindow.get());
+}
+
 }  // namespace android
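
For reference, a minimal standalone sketch of the atrace instrumentation pattern the hunks above introduce. It assumes a platform module linking libutils and libbase; ScopedTrace and StringPrintf are the existing helpers, and the function below is hypothetical:

    #define ATRACE_TAG ATRACE_TAG_VIDEO   // must be defined before including <utils/Trace.h>
    #include <utils/Trace.h>
    #include <android-base/stringprintf.h>

    void traceQueueExample(const char *name, long long timestampUs) {
        // Emits a trace slice covering the rest of this scope, labelled with the
        // component name and buffer timestamp, mirroring the queue()/onWorkDone() hunks above.
        android::ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
                "CCodecBufferChannel::queue(%s@ts=%lld)", name, timestampUs).c_str());
        // ... queue the work item to the component ...
    }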
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 4db69cb..b3a5f4b 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -181,6 +181,11 @@
 
     void setMetaMode(MetaMode mode);
 
+    /**
+     * Push a blank buffer to the configured native output surface.
+     */
+    status_t pushBlankBufferToOutputSurface();
+
 private:
     class QueueGuard;
 
@@ -324,6 +329,8 @@
         return mCrypto != nullptr || mDescrambler != nullptr;
     }
     std::atomic_bool mSendEncryptedInfoBuffer;
+
+    std::atomic_bool mTunneled;
 };
 
 // Conversion of a c2_status_t value to a status_t value may depend on the
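
A hedged sketch of how the new pushBlankBufferToOutputSurface() entry point might be driven. The stop-time gating via mPushBlankBuffersOnStop (added to CCodecConfig further down) is an assumption here, and `config`/`channel` are hypothetical names for the objects a codec instance owns:

    // Hypothetical caller-side sketch.
    if (config->mPushBlankBuffersOnStop) {
        status_t err = channel->pushBlankBufferToOutputSurface();
        if (err != OK) {
            ALOGW("pushing a blank buffer to the output surface failed: %d", err);
        }
    }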
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 333a2ca..9a71198 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -21,7 +21,7 @@
 #include <C2PlatformSupport.h>
 
 #include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/SkipCutBuffer.h>
 #include <mediadrm/ICrypto.h>
@@ -33,6 +33,8 @@
 
 namespace {
 
+constexpr uint32_t PIXEL_FORMAT_UNKNOWN = 0;
+
 sp<GraphicBlockBuffer> AllocateGraphicBuffer(
         const std::shared_ptr<C2BlockPool> &pool,
         const sp<AMessage> &format,
@@ -132,6 +134,7 @@
     if (!copy->copy(c2buffer)) {
         return nullptr;
     }
+    copy->meta()->extend(buffer->meta());
     return copy;
 }
 
@@ -199,6 +202,56 @@
     mSkipCutBuffer = new SkipCutBuffer(skip, cut, mChannelCount);
 }
 
+bool OutputBuffers::convert(
+        const std::shared_ptr<C2Buffer> &src, sp<Codec2Buffer> *dst) {
+    if (!src || src->data().type() != C2BufferData::LINEAR) {
+        return false;
+    }
+    int32_t configEncoding = kAudioEncodingPcm16bit;
+    int32_t codecEncoding = kAudioEncodingPcm16bit;
+    if (mFormat->findInt32("android._codec-pcm-encoding", &codecEncoding)
+            && mFormat->findInt32("android._config-pcm-encoding", &configEncoding)) {
+        if (mSrcEncoding != codecEncoding || mDstEncoding != configEncoding) {
+            if (codecEncoding != configEncoding) {
+                mDataConverter = AudioConverter::Create(
+                        (AudioEncoding)codecEncoding, (AudioEncoding)configEncoding);
+                ALOGD_IF(mDataConverter, "[%s] Converter created from %d to %d",
+                         mName, codecEncoding, configEncoding);
+                mFormatWithConverter = mFormat->dup();
+                mFormatWithConverter->setInt32(KEY_PCM_ENCODING, configEncoding);
+            } else {
+                mDataConverter = nullptr;
+                mFormatWithConverter = nullptr;
+            }
+            mSrcEncoding = codecEncoding;
+            mDstEncoding = configEncoding;
+        }
+        if (int encoding; !mFormat->findInt32(KEY_PCM_ENCODING, &encoding)
+                || encoding != mDstEncoding) {
+        }
+    }
+    if (!mDataConverter) {
+        return false;
+    }
+    sp<MediaCodecBuffer> srcBuffer = ConstLinearBlockBuffer::Allocate(mFormat, src);
+    if (!srcBuffer) {
+        return false;
+    }
+    if (!*dst) {
+        *dst = new Codec2Buffer(
+                mFormat,
+                new ABuffer(mDataConverter->targetSize(srcBuffer->size())));
+    }
+    sp<MediaCodecBuffer> dstBuffer = *dst;
+    status_t err = mDataConverter->convert(srcBuffer, dstBuffer);
+    if (err != OK) {
+        ALOGD("[%s] buffer conversion failed: %d", mName, err);
+        return false;
+    }
+    dstBuffer->setFormat(mFormatWithConverter);
+    return true;
+}
+
 void OutputBuffers::clearStash() {
     mPending.clear();
     mReorderStash.clear();
@@ -236,7 +289,7 @@
         int32_t flags,
         const sp<AMessage>& format,
         const C2WorkOrdinalStruct& ordinal) {
-    bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;
+    bool eos = flags & BUFFER_FLAG_END_OF_STREAM;
     if (!buffer && eos) {
         // TRICKY: we may be violating ordering of the stash here. Because we
         // don't expect any more emplace() calls after this, the ordering should
@@ -244,7 +297,7 @@
         mReorderStash.emplace_back(
                 buffer, notify, timestamp, flags, format, ordinal);
     } else {
-        flags = flags & ~MediaCodec::BUFFER_FLAG_EOS;
+        flags = flags & ~BUFFER_FLAG_END_OF_STREAM;
         auto it = mReorderStash.begin();
         for (; it != mReorderStash.end(); ++it) {
             if (less(ordinal, it->ordinal)) {
@@ -255,7 +308,7 @@
                 buffer, notify, timestamp, flags, format, ordinal);
         if (eos) {
             mReorderStash.back().flags =
-                mReorderStash.back().flags | MediaCodec::BUFFER_FLAG_EOS;
+                mReorderStash.back().flags | BUFFER_FLAG_END_OF_STREAM;
         }
     }
     while (!mReorderStash.empty() && mReorderStash.size() > mDepth) {
@@ -292,7 +345,7 @@
 
     // Flushing mReorderStash because no other buffers should come after output
     // EOS.
-    if (entry.flags & MediaCodec::BUFFER_FLAG_EOS) {
+    if (entry.flags & BUFFER_FLAG_END_OF_STREAM) {
         // Flush reorder stash
         setReorderDepth(0);
     }
@@ -1078,7 +1131,7 @@
         return err;
     }
     c2Buffer->setFormat(mFormat);
-    if (!c2Buffer->copy(buffer)) {
+    if (!convert(buffer, &c2Buffer) && !c2Buffer->copy(buffer)) {
         ALOGD("[%s] copy buffer failed", mName);
         return WOULD_BLOCK;
     }
@@ -1194,9 +1247,12 @@
         const std::shared_ptr<C2Buffer> &buffer,
         size_t *index,
         sp<MediaCodecBuffer> *clientBuffer) {
-    sp<Codec2Buffer> newBuffer = wrap(buffer);
-    if (newBuffer == nullptr) {
-        return NO_MEMORY;
+    sp<Codec2Buffer> newBuffer;
+    if (!convert(buffer, &newBuffer)) {
+        newBuffer = wrap(buffer);
+        if (newBuffer == nullptr) {
+            return NO_MEMORY;
+        }
     }
     newBuffer->setFormat(mFormat);
     *index = mImpl.assignSlot(newBuffer);
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 995d3a4..c8e9930 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -18,9 +18,11 @@
 
 #define CCODEC_BUFFERS_H_
 
+#include <optional>
 #include <string>
 
 #include <C2Config.h>
+#include <DataConverter.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/MediaCodecBuffer.h>
 
@@ -382,6 +384,14 @@
      */
     void submit(const sp<MediaCodecBuffer> &buffer);
 
+    /**
+     * Apply DataConverter from |src| to |*dst| if needed. If |*dst| is nullptr,
+     * a new buffer is allocated.
+     *
+     * Returns true if conversion was needed and executed; false otherwise.
+     */
+    bool convert(const std::shared_ptr<C2Buffer> &src, sp<Codec2Buffer> *dst);
+
 private:
     // SkipCutBuffer
     int32_t mDelay;
@@ -391,6 +401,12 @@
 
     void setSkipCutBuffer(int32_t skip, int32_t cut);
 
+    // DataConverter
+    sp<DataConverter> mDataConverter;
+    sp<AMessage> mFormatWithConverter;
+    std::optional<int32_t> mSrcEncoding;
+    std::optional<int32_t> mDstEncoding;
+
     // Output stash
 
     // Struct for an entry in the output stash (mPending and mReorderStash)
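
A sketch of the DataConverter calls that the new OutputBuffers::convert() relies on, assuming hypothetical `format` and `srcBuffer` objects; AudioConverter::Create(), targetSize() and convert() are the existing stagefright DataConverter APIs used in the CCodecBuffers.cpp hunk above:

    sp<DataConverter> converter = AudioConverter::Create(
            (AudioEncoding)kAudioEncodingPcmFloat,    // what the codec produced
            (AudioEncoding)kAudioEncodingPcm16bit);   // what the client configured
    if (converter != nullptr) {
        sp<MediaCodecBuffer> dstBuffer = new MediaCodecBuffer(
                format, new ABuffer(converter->targetSize(srcBuffer->size())));
        if (converter->convert(srcBuffer, dstBuffer) == OK) {
            // dstBuffer now holds 16-bit PCM; it is reported with KEY_PCM_ENCODING
            // updated, as convert() does via mFormatWithConverter.
        }
    }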
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index c275187..0f998dd 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -20,6 +20,8 @@
 #include <log/log.h>
 #include <utils/NativeHandle.h>
 
+#include <android-base/properties.h>
+
 #include <C2Component.h>
 #include <C2Param.h>
 #include <util/C2InterfaceHelper.h>
@@ -324,7 +326,8 @@
     : mInputFormat(new AMessage),
       mOutputFormat(new AMessage),
       mUsingSurface(false),
-      mTunneled(false) { }
+      mTunneled(false),
+      mPushBlankBuffersOnStop(false) { }
 
 void CCodecConfig::initializeStandardParams() {
     typedef Domain D;
@@ -657,24 +660,29 @@
     add(ConfigMapper(KEY_SAMPLE_RATE,   C2_PARAMKEY_CODED_SAMPLE_RATE,  "value")
         .limitTo(D::AUDIO & D::CODED));
 
-    add(ConfigMapper(KEY_PCM_ENCODING,  C2_PARAMKEY_PCM_ENCODING,       "value")
+    auto pcmEncodingMapper = [](C2Value v) -> C2Value {
+        int32_t value;
+        C2Config::pcm_encoding_t to;
+        if (v.get(&value) && C2Mapper::map(value, &to)) {
+            return to;
+        }
+        return C2Value();
+    };
+    auto pcmEncodingReverse = [](C2Value v) -> C2Value {
+        C2Config::pcm_encoding_t value;
+        int32_t to;
+        using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(value)>::type;
+        if (v.get((C2ValueType*)&value) && C2Mapper::map(value, &to)) {
+            return to;
+        }
+        return C2Value();
+    };
+    add(ConfigMapper(KEY_PCM_ENCODING,              C2_PARAMKEY_PCM_ENCODING, "value")
         .limitTo(D::AUDIO)
-        .withMappers([](C2Value v) -> C2Value {
-            int32_t value;
-            C2Config::pcm_encoding_t to;
-            if (v.get(&value) && C2Mapper::map(value, &to)) {
-                return to;
-            }
-            return C2Value();
-        }, [](C2Value v) -> C2Value {
-            C2Config::pcm_encoding_t value;
-            int32_t to;
-            using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(value)>::type;
-            if (v.get((C2ValueType*)&value) && C2Mapper::map(value, &to)) {
-                return to;
-            }
-            return C2Value();
-        }));
+        .withMappers(pcmEncodingMapper, pcmEncodingReverse));
+    add(ConfigMapper("android._codec-pcm-encoding", C2_PARAMKEY_PCM_ENCODING, "value")
+        .limitTo(D::AUDIO & D::READ)
+        .withMappers(pcmEncodingMapper, pcmEncodingReverse));
 
     add(ConfigMapper(KEY_IS_ADTS, C2_PARAMKEY_AAC_PACKAGING, "value")
         .limitTo(D::AUDIO & D::CODED)
@@ -948,9 +956,29 @@
             return value == 0 ? C2_FALSE : C2_TRUE;
         }));
 
-    /* still to do
-    constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
+    add(ConfigMapper(KEY_VIDEO_QP_AVERAGE, C2_PARAMKEY_AVERAGE_QP, "value")
+        .limitTo(D::ENCODER & D::VIDEO & D::READ));
 
+    add(ConfigMapper(KEY_PICTURE_TYPE, C2_PARAMKEY_PICTURE_TYPE, "value")
+        .limitTo(D::ENCODER & D::VIDEO & D::READ)
+        .withMappers([](C2Value v) -> C2Value {
+            int32_t sdk;
+            C2Config::picture_type_t c2;
+            if (v.get(&sdk) && C2Mapper::map(sdk, &c2)) {
+                return C2Value(c2);
+            }
+            return C2Value();
+        }, [](C2Value v) -> C2Value {
+            C2Config::picture_type_t c2;
+            int32_t sdk = PICTURE_TYPE_UNKNOWN;
+            using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+            if (v.get((C2ValueType*)&c2) && C2Mapper::map(c2, &sdk)) {
+                return sdk;
+            }
+            return C2Value();
+        }));
+
+    /* still to do
        not yet used by MediaCodec, but defined as MediaFormat
     KEY_AUDIO_SESSION_ID // we use "audio-hw-sync"
     KEY_OUTPUT_REORDER_DEPTH
@@ -1057,6 +1085,13 @@
                     C2_PARAMKEY_SURFACE_SCALING_MODE);
         } else {
             addLocalParam(new C2StreamColorAspectsInfo::input(0u), C2_PARAMKEY_COLOR_ASPECTS);
+
+            if (domain.value == C2Component::DOMAIN_VIDEO) {
+                addLocalParam(new C2AndroidStreamAverageBlockQuantizationInfo::output(0u, 0),
+                              C2_PARAMKEY_AVERAGE_QP);
+                addLocalParam(new C2StreamPictureTypeMaskInfo::output(0u, 0),
+                              C2_PARAMKEY_PICTURE_TYPE);
+            }
         }
     }
 
@@ -1098,15 +1133,21 @@
         const std::shared_ptr<Codec2Client::Configurable> &configurable,
         const std::vector<C2Param::Index> &indices,
         c2_blocking_t blocking) {
+    static const int32_t kProductFirstApiLevel =
+        base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
+    static const int32_t kBoardApiLevel =
+        base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
+    static const int32_t kFirstApiLevel =
+        (kBoardApiLevel != 0) ? kBoardApiLevel : kProductFirstApiLevel;
     mSubscribedIndices.insert(indices.begin(), indices.end());
-    // TODO: enable this when components no longer crash on this config
-    if (mSubscribedIndices.size() != mSubscribedIndicesSize && false) {
-        std::vector<uint32_t> indices;
+    if (mSubscribedIndices.size() != mSubscribedIndicesSize
+            && kFirstApiLevel >= __ANDROID_API_T__) {
+        std::vector<uint32_t> indicesVector;
         for (C2Param::Index ix : mSubscribedIndices) {
-            indices.push_back(ix);
+            indicesVector.push_back(ix);
         }
         std::unique_ptr<C2SubscribedParamIndicesTuning> subscribeTuning =
-            C2SubscribedParamIndicesTuning::AllocUnique(indices);
+            C2SubscribedParamIndicesTuning::AllocUnique(indicesVector);
         std::vector<std::unique_ptr<C2SettingResult>> results;
         c2_status_t c2Err = configurable->config({ subscribeTuning.get() }, blocking, &results);
         if (c2Err != C2_OK && c2Err != C2_BAD_INDEX) {
@@ -1882,7 +1923,9 @@
     names->clear();
     // TODO: expand to standard params
     for (const auto &[key, desc] : mVendorParams) {
-        names->push_back(key);
+        if (desc->isVisible()) {
+            names->push_back(key);
+        }
     }
     return OK;
 }
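
A small worked example of what the extracted pcmEncodingMapper lambda above computes, using the default encoding the patch falls back to; C2Mapper::map and the constants are the existing ones referenced in the hunk:

    C2Value sdkValue = int32_t(kAudioEncodingPcm16bit);   // SDK-side value from MediaFormat
    int32_t sdk = 0;
    C2Config::pcm_encoding_t c2;
    if (sdkValue.get(&sdk) && C2Mapper::map(sdk, &c2)) {
        // c2 resolves to C2Config::PCM_16; the reverse lambda maps it back so the
        // read-only "android._codec-pcm-encoding" key can be reported to CCodecBuffers.
    }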
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 417b773..2e7b866 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -148,6 +148,8 @@
     bool mTunneled;
     sp<NativeHandle> mSidebandHandle;
 
+    bool mPushBlankBuffersOnStop;
+
     CCodecConfig();
 
     /// initializes the members required to manage the format: descriptors, reflector,
@@ -363,11 +365,6 @@
             const std::vector<std::string> &names,
             c2_blocking_t blocking = C2_DONT_BLOCK);
 
-private:
-
-    /// initializes the standard MediaCodec to Codec 2.0 params mapping
-    void initializeStandardParams();
-
     /// Adds indices to the subscribed indices, and updated subscription to component
     /// \param blocking blocking mode to use with the component
     status_t subscribeToConfigUpdate(
@@ -375,6 +372,11 @@
             const std::vector<C2Param::Index> &indices,
             c2_blocking_t blocking = C2_DONT_BLOCK);
 
+private:
+
+    /// initializes the standard MediaCodec to Codec 2.0 params mapping
+    void initializeStandardParams();
+
     /// Gets SDK format from codec 2.0 reflected configuration
     /// \param domain input/output bitmask
     sp<AMessage> getFormatForDomain(
@@ -396,4 +398,3 @@
 }  // namespace android
 
 #endif  // C_CODEC_H_
-
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 4070478..c2405e8 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -16,11 +16,18 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2Buffer"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
+#include <aidl/android/hardware/graphics/common/Cta861_3.h>
+#include <aidl/android/hardware/graphics/common/Smpte2086.h>
 #include <android-base/properties.h>
 #include <android/hardware/cas/native/1.0/types.h>
 #include <android/hardware/drm/1.0/types.h>
+#include <android/hardware/graphics/common/1.2/types.h>
+#include <android/hardware/graphics/mapper/4.0/IMapper.h>
+#include <gralloctypes/Gralloc4.h>
 #include <hidlmemory/FrameworkUtils.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/stagefright/CodecBase.h>
@@ -224,6 +231,7 @@
           mAllocatedDepth(0),
           mBackBufferSize(0),
           mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+        ATRACE_CALL();
         if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
             mClientColorFormat = COLOR_FormatYUV420Flexible;
         }
@@ -358,21 +366,22 @@
                         break;
 
                     case COLOR_FormatYUVP010:
+                        // stride is in bytes
                         mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                         mediaImage->mPlane[mediaImage->Y].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride * 2;
+                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                         mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                         mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
 
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride * 2;
+                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                         mediaImage->mPlane[mediaImage->U].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride * 2;
+                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
                         mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                         mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
 
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 2 + 2;
+                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
                         mediaImage->mPlane[mediaImage->V].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride * 2;
+                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
                         mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                         mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
                         if (tryWrapping) {
@@ -533,8 +542,8 @@
                 mInitCheck = BAD_VALUE;
                 return;
             }
-            bufferSize += stride * vStride
-                    / plane.rowSampling / plane.colSampling * divUp(mAllocatedDepth, 8u);
+            // stride is in bytes
+            bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
         }
 
         mBackBufferSize = bufferSize;
@@ -575,6 +584,7 @@
      * Copy C2GraphicView to MediaImage2.
      */
     status_t copyToMediaImage() {
+        ATRACE_CALL();
         if (mInitCheck != OK) {
             return mInitCheck;
         }
@@ -613,7 +623,9 @@
         const sp<AMessage> &format,
         const std::shared_ptr<C2GraphicBlock> &block,
         std::function<sp<ABuffer>(size_t)> alloc) {
+    ATRACE_BEGIN("GraphicBlockBuffer::Allocate block->map()");
     C2GraphicView view(block->map().get());
+    ATRACE_END();
     if (view.error() != C2_OK) {
         ALOGD("C2GraphicBlock::map failed: %d", view.error());
         return nullptr;
@@ -658,6 +670,7 @@
 }
 
 std::shared_ptr<C2Buffer> GraphicBlockBuffer::asC2Buffer() {
+    ATRACE_CALL();
     uint32_t width = mView.width();
     uint32_t height = mView.height();
     if (!mWrapped) {
@@ -746,8 +759,10 @@
         ALOGD("C2Buffer precond fail");
         return nullptr;
     }
+    ATRACE_BEGIN("ConstGraphicBlockBuffer::Allocate block->map()");
     std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
             buffer->data().graphicBlocks()[0].map().get()));
+    ATRACE_END();
     std::unique_ptr<const C2GraphicView> holder;
 
     GraphicView2MediaImageConverter converter(*view, format, false /* copy */);
@@ -787,8 +802,14 @@
         ALOGD("format had no width / height");
         return nullptr;
     }
-    // NOTE: we currently only support YUV420 formats for byte-buffer mode.
-    sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * 3 / 2));
+    int32_t colorFormat = COLOR_FormatYUV420Flexible;
+    int32_t bpp = 12;  // 8(Y) + 2(U) + 2(V)
+    if (format->findInt32(KEY_COLOR_FORMAT, &colorFormat)) {
+        if (colorFormat == COLOR_FormatYUVP010) {
+            bpp = 24;  // 16(Y) + 4(U) + 4(V)
+        }
+    }
+    sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * bpp / 8));
     return new ConstGraphicBlockBuffer(
             format,
             aBuffer,
@@ -842,11 +863,13 @@
         return false;
     }
 
+    ATRACE_BEGIN("ConstGraphicBlockBuffer::canCopy block->map()");
     GraphicView2MediaImageConverter converter(
             buffer->data().graphicBlocks()[0].map().get(),
             // FIXME: format() is not const, but we cannot change it, so do a const cast here
             const_cast<ConstGraphicBlockBuffer *>(this)->format(),
             true /* copy */);
+    ATRACE_END();
     if (converter.initCheck() != OK) {
         ALOGD("ConstGraphicBlockBuffer::canCopy: converter init failed: %d", converter.initCheck());
         return false;
@@ -941,4 +964,245 @@
     return const_cast<native_handle_t *>(mBlock->handle());
 }
 
+using ::aidl::android::hardware::graphics::common::Cta861_3;
+using ::aidl::android::hardware::graphics::common::Smpte2086;
+
+using ::android::gralloc4::MetadataType_Cta861_3;
+using ::android::gralloc4::MetadataType_Smpte2086;
+using ::android::gralloc4::MetadataType_Smpte2094_40;
+
+using ::android::hardware::Return;
+using ::android::hardware::hidl_vec;
+
+using Error4 = ::android::hardware::graphics::mapper::V4_0::Error;
+using IMapper4 = ::android::hardware::graphics::mapper::V4_0::IMapper;
+
+namespace {
+
+sp<IMapper4> GetMapper4() {
+    static sp<IMapper4> sMapper = IMapper4::getService();
+    return sMapper;
+}
+
+class Gralloc4Buffer {
+public:
+    Gralloc4Buffer(const C2Handle *const handle) : mBuffer(nullptr) {
+        sp<IMapper4> mapper = GetMapper4();
+        if (!mapper) {
+            return;
+        }
+        // Unwrap raw buffer handle from the C2Handle
+        native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
+        if (!nh) {
+            return;
+        }
+        // Import the raw handle so IMapper can use the buffer. The imported
+        // handle must be freed when the client is done with the buffer.
+        mapper->importBuffer(
+                hardware::hidl_handle(nh),
+                [&](const Error4 &error, void *buffer) {
+                    if (error == Error4::NONE) {
+                        mBuffer = buffer;
+                    }
+                });
+
+        // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
+        //         does not clone the fds. Thus we need to delete the handle
+        //         without closing it.
+        native_handle_delete(nh);
+    }
+
+    ~Gralloc4Buffer() {
+        sp<IMapper4> mapper = GetMapper4();
+        if (mapper && mBuffer) {
+            // Free the imported buffer handle. This does not release the
+            // underlying buffer itself.
+            mapper->freeBuffer(mBuffer);
+        }
+    }
+
+    void *get() const { return mBuffer; }
+    operator bool() const { return (mBuffer != nullptr); }
+private:
+    void *mBuffer;
+};
+
+}  // namespace
+
+c2_status_t GetHdrMetadataFromGralloc4Handle(
+        const C2Handle *const handle,
+        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
+        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
+    c2_status_t err = C2_OK;
+    sp<IMapper4> mapper = GetMapper4();
+    Gralloc4Buffer buffer(handle);
+    if (!mapper || !buffer) {
+        // Gralloc4 not supported; nothing to do
+        return err;
+    }
+    Error4 mapperErr = Error4::NONE;
+    if (staticInfo) {
+        ALOGV("Grabbing static HDR info from gralloc4 metadata");
+        staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
+        memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
+        (*staticInfo)->maxCll = 0;
+        (*staticInfo)->maxFall = 0;
+        IMapper4::get_cb cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
+            mapperErr = err;
+            if (err != Error4::NONE) {
+                return;
+            }
+
+            std::optional<Smpte2086> smpte2086;
+            gralloc4::decodeSmpte2086(vec, &smpte2086);
+            if (smpte2086) {
+                (*staticInfo)->mastering.red.x    = smpte2086->primaryRed.x;
+                (*staticInfo)->mastering.red.y    = smpte2086->primaryRed.y;
+                (*staticInfo)->mastering.green.x  = smpte2086->primaryGreen.x;
+                (*staticInfo)->mastering.green.y  = smpte2086->primaryGreen.y;
+                (*staticInfo)->mastering.blue.x   = smpte2086->primaryBlue.x;
+                (*staticInfo)->mastering.blue.y   = smpte2086->primaryBlue.y;
+                (*staticInfo)->mastering.white.x  = smpte2086->whitePoint.x;
+                (*staticInfo)->mastering.white.y  = smpte2086->whitePoint.y;
+
+                (*staticInfo)->mastering.maxLuminance = smpte2086->maxLuminance;
+                (*staticInfo)->mastering.minLuminance = smpte2086->minLuminance;
+            } else {
+                mapperErr = Error4::BAD_VALUE;
+            }
+        };
+        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2086, cb);
+        if (!ret.isOk()) {
+            err = C2_REFUSED;
+        } else if (mapperErr != Error4::NONE) {
+            err = C2_CORRUPTED;
+        }
+        cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
+            mapperErr = err;
+            if (err != Error4::NONE) {
+                return;
+            }
+
+            std::optional<Cta861_3> cta861_3;
+            gralloc4::decodeCta861_3(vec, &cta861_3);
+            if (cta861_3) {
+                (*staticInfo)->maxCll   = cta861_3->maxContentLightLevel;
+                (*staticInfo)->maxFall  = cta861_3->maxFrameAverageLightLevel;
+            } else {
+                mapperErr = Error4::BAD_VALUE;
+            }
+        };
+        ret = mapper->get(buffer.get(), MetadataType_Cta861_3, cb);
+        if (!ret.isOk()) {
+            err = C2_REFUSED;
+        } else if (mapperErr != Error4::NONE) {
+            err = C2_CORRUPTED;
+        }
+    }
+    if (dynamicInfo) {
+        ALOGV("Grabbing dynamic HDR info from gralloc4 metadata");
+        dynamicInfo->reset();
+        IMapper4::get_cb cb = [&mapperErr, dynamicInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
+            mapperErr = err;
+            if (err != Error4::NONE) {
+                return;
+            }
+            if (!dynamicInfo) {
+                return;
+            }
+            *dynamicInfo = C2StreamHdrDynamicMetadataInfo::input::AllocShared(
+                    vec.size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+            memcpy((*dynamicInfo)->m.data, vec.data(), vec.size());
+        };
+        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2094_40, cb);
+        if (!ret.isOk() || mapperErr != Error4::NONE) {
+            dynamicInfo->reset();
+        }
+    }
+
+    return err;
+}
+
+c2_status_t SetHdrMetadataToGralloc4Handle(
+        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
+        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
+        const C2Handle *const handle) {
+    c2_status_t err = C2_OK;
+    sp<IMapper4> mapper = GetMapper4();
+    Gralloc4Buffer buffer(handle);
+    if (!mapper || !buffer) {
+        // Gralloc4 not supported; nothing to do
+        return err;
+    }
+    if (staticInfo && *staticInfo) {
+        ALOGV("Setting static HDR info as gralloc4 metadata");
+        std::optional<Smpte2086> smpte2086 = Smpte2086{
+            {staticInfo->mastering.red.x, staticInfo->mastering.red.y},
+            {staticInfo->mastering.green.x, staticInfo->mastering.green.y},
+            {staticInfo->mastering.blue.x, staticInfo->mastering.blue.y},
+            {staticInfo->mastering.white.x, staticInfo->mastering.white.y},
+            staticInfo->mastering.maxLuminance,
+            staticInfo->mastering.minLuminance,
+        };
+        hidl_vec<uint8_t> vec;
+        if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
+                && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
+                && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
+                && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
+                && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
+                && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
+                && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
+                && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
+                && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance
+                && gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
+            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Smpte2086, vec);
+            if (!ret.isOk()) {
+                err = C2_REFUSED;
+            } else if (ret != Error4::NONE) {
+                err = C2_CORRUPTED;
+            }
+        }
+        std::optional<Cta861_3> cta861_3 = Cta861_3{
+            staticInfo->maxCll,
+            staticInfo->maxFall,
+        };
+        if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel
+                && gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
+            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Cta861_3, vec);
+            if (!ret.isOk()) {
+                err = C2_REFUSED;
+            } else if (ret != Error4::NONE) {
+                err = C2_CORRUPTED;
+            }
+        }
+    }
+    if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
+        ALOGV("Setting dynamic HDR info as gralloc4 metadata");
+        hidl_vec<uint8_t> vec;
+        vec.resize(dynamicInfo->flexCount());
+        memcpy(vec.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
+        std::optional<IMapper4::MetadataType> metadataType;
+        switch (dynamicInfo->m.type_) {
+        case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10:
+            // TODO
+            break;
+        case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40:
+            metadataType = MetadataType_Smpte2094_40;
+            break;
+        }
+        if (metadataType) {
+            Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
+            if (!ret.isOk()) {
+                err = C2_REFUSED;
+            } else if (ret != Error4::NONE) {
+                err = C2_CORRUPTED;
+            }
+        } else {
+            err = C2_BAD_VALUE;
+        }
+    }
+
+    return err;
+}
+
 }  // namespace android
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index dc788cd..b02b042 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -19,6 +19,7 @@
 #define CODEC2_BUFFER_H_
 
 #include <C2Buffer.h>
+#include <C2Config.h>
 
 #include <binder/IMemory.h>
 #include <media/hardware/VideoAPI.h>
@@ -391,6 +392,36 @@
     int32_t mHeapSeqNum;
 };
 
+/**
+ * Get HDR metadata from Gralloc4 handle.
+ *
+ * \param[in]   handle      handle of the allocation
+ * \param[out]  staticInfo  HDR static info to be filled. Ignored if null;
+ *                          if |handle| is invalid or does not contain the metadata,
+ *                          the shared_ptr is reset.
+ * \param[out]  dynamicInfo HDR dynamic info to be filled. Ignored if null;
+ *                          if |handle| is invalid or does not contain the metadata,
+ *                          the shared_ptr is reset.
+ * \return C2_OK if successful
+ */
+c2_status_t GetHdrMetadataFromGralloc4Handle(
+        const C2Handle *const handle,
+        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
+        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo);
+
+/**
+ * Set HDR metadata to Gralloc4 handle.
+ *
+ * \param[in]   staticInfo  HDR static info to set. Ignored if null or invalid.
+ * \param[in]   dynamicInfo HDR dynamic info to set. Ignored if null or invalid.
+ * \param[out]  handle      handle of the allocation.
+ * \return C2_OK if successful
+ */
+c2_status_t SetHdrMetadataToGralloc4Handle(
+        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
+        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
+        const C2Handle *const handle);
+
 }  // namespace android
 
 #endif  // CODEC2_BUFFER_H_
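
A hedged usage sketch for the two helpers declared above; `block` stands in for a C2GraphicBlock and the attach step is only indicated in a comment:

    std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> staticInfo;
    std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
    if (GetHdrMetadataFromGralloc4Handle(block->handle(), &staticInfo, &dynamicInfo) == C2_OK) {
        // Either pointer is left reset when the handle carried no such gralloc4 metadata;
        // attach whichever infos are present to the input C2Buffer before queueing it.
    }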
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 7c4bfb6..2b8a160 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -54,6 +54,9 @@
 
 using Traits = C2Component::Traits;
 
+// HAL pixel format -> framework color format
+typedef std::map<uint32_t, int32_t> PixelFormatMap;
+
 namespace /* unnamed */ {
 
 bool hasPrefix(const std::string& s, const char* prefix) {
@@ -67,6 +70,26 @@
             s.compare(s.size() - suffixLen, suffixLen, suffix) == 0;
 }
 
+std::optional<int32_t> findFrameworkColorFormat(
+        const C2FlexiblePixelFormatDescriptorStruct &desc) {
+    switch (desc.bitDepth) {
+        case 8u:
+            if (desc.layout == C2Color::PLANAR_PACKED
+                    || desc.layout == C2Color::SEMIPLANAR_PACKED) {
+                return COLOR_FormatYUV420Flexible;
+            }
+            break;
+        case 10u:
+            if (desc.layout == C2Color::SEMIPLANAR_PACKED) {
+                return COLOR_FormatYUVP010;
+            }
+            break;
+        default:
+            break;
+    }
+    return std::nullopt;
+}
+
 // returns true if component advertised supported profile level(s)
 bool addSupportedProfileLevels(
         std::shared_ptr<Codec2Client::Interface> intf,
@@ -96,9 +119,12 @@
         return false;
     }
 
-    // determine if codec supports HDR
+    // determine if codec supports HDR; imply 10-bit support
     bool supportsHdr = false;
+    // determine if codec supports HDR10Plus; imply 10-bit support
     bool supportsHdr10Plus = false;
+    // determine if codec supports 10-bit format
+    bool supports10Bit = false;
 
     std::vector<std::shared_ptr<C2ParamDescriptor>> paramDescs;
     c2_status_t err1 = intf->querySupportedParams(&paramDescs);
@@ -126,6 +152,10 @@
     supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
     supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
 
+    // HDR support implies 10-bit support.
+    // TODO: directly check this from the component interface
+    supports10Bit = (supportsHdr || supportsHdr10Plus);
+
     bool added = false;
 
     for (C2Value::Primitive profile : profileQuery[0].values.values) {
@@ -165,6 +195,12 @@
                     }
                 }
             }
+            if (supports10Bit) {
+                auto bitnessMapper = C2Mapper::GetBitDepthProfileLevelMapper(trait.mediaType, 10);
+                if (bitnessMapper && bitnessMapper->mapProfile(pl.profile, &sdkProfile)) {
+                    caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
+                }
+            }
         } else if (!mapper) {
             caps->addProfileLevel(pl.profile, pl.level);
         }
@@ -198,27 +234,73 @@
 void addSupportedColorFormats(
         std::shared_ptr<Codec2Client::Interface> intf,
         MediaCodecInfo::CapabilitiesWriter *caps,
-        const Traits& trait, const std::string &mediaType) {
-    (void)intf;
-
+        const Traits& trait, const std::string &mediaType,
+        const PixelFormatMap &pixelFormatMap) {
     // TODO: get this from intf() as well, but how do we map them to
     // MediaCodec color formats?
     bool encoder = trait.kind == C2Component::KIND_ENCODER;
     if (mediaType.find("video") != std::string::npos
             || mediaType.find("image") != std::string::npos) {
+
+        std::vector<C2FieldSupportedValuesQuery> query;
+        if (encoder) {
+            C2StreamPixelFormatInfo::input pixelFormat;
+            query.push_back(C2FieldSupportedValuesQuery::Possible(
+                    C2ParamField::Make(pixelFormat, pixelFormat.value)));
+        } else {
+            C2StreamPixelFormatInfo::output pixelFormat;
+            query.push_back(C2FieldSupportedValuesQuery::Possible(
+                    C2ParamField::Make(pixelFormat, pixelFormat.value)));
+        }
+        std::list<int32_t> supportedColorFormats;
+        if (intf->querySupportedValues(query, C2_DONT_BLOCK) == C2_OK) {
+            if (query[0].status == C2_OK) {
+                const C2FieldSupportedValues &fsv = query[0].values;
+                if (fsv.type == C2FieldSupportedValues::VALUES) {
+                    for (C2Value::Primitive value : fsv.values) {
+                        auto it = pixelFormatMap.find(value.u32);
+                        if (it != pixelFormatMap.end()) {
+                            auto it2 = std::find(
+                                    supportedColorFormats.begin(),
+                                    supportedColorFormats.end(),
+                                    it->second);
+                            if (it2 == supportedColorFormats.end()) {
+                                supportedColorFormats.push_back(it->second);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        auto addDefaultColorFormat = [caps, &supportedColorFormats](int32_t colorFormat) {
+            caps->addColorFormat(colorFormat);
+            auto it = std::find(
+                    supportedColorFormats.begin(), supportedColorFormats.end(), colorFormat);
+            if (it != supportedColorFormats.end()) {
+                supportedColorFormats.erase(it);
+            }
+        };
+
+        // The color formats are listed in order of preference. The intention here is to advertise:
+        //   c2.android.* codecs: YUV420s, Surface, <the rest>
+        //   all other codecs:    Surface, YUV420s, <the rest>
+        // TODO: get this preference via Codec2 API
+
         // vendor video codecs prefer opaque format
         if (trait.name.find("android") == std::string::npos) {
-            caps->addColorFormat(COLOR_FormatSurface);
+            addDefaultColorFormat(COLOR_FormatSurface);
         }
-        caps->addColorFormat(COLOR_FormatYUV420Flexible);
-        caps->addColorFormat(COLOR_FormatYUV420Planar);
-        caps->addColorFormat(COLOR_FormatYUV420SemiPlanar);
-        caps->addColorFormat(COLOR_FormatYUV420PackedPlanar);
-        caps->addColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
-        // framework video encoders must support surface format, though it is unclear
-        // that they will be able to map it if it is opaque
-        if (encoder && trait.name.find("android") != std::string::npos) {
-            caps->addColorFormat(COLOR_FormatSurface);
+        addDefaultColorFormat(COLOR_FormatYUV420Flexible);
+        addDefaultColorFormat(COLOR_FormatYUV420Planar);
+        addDefaultColorFormat(COLOR_FormatYUV420SemiPlanar);
+        addDefaultColorFormat(COLOR_FormatYUV420PackedPlanar);
+        addDefaultColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
+        // Android video codecs prefer CPU-readable formats
+        if (trait.name.find("android") != std::string::npos) {
+            addDefaultColorFormat(COLOR_FormatSurface);
+        }
+        for (int32_t colorFormat : supportedColorFormats) {
+            caps->addColorFormat(colorFormat);
         }
     }
 }
@@ -410,6 +492,7 @@
         }
     }
 
+    std::map<std::string, PixelFormatMap> nameToPixelFormatMap;
     for (const Traits& trait : traits) {
         C2Component::rank_t rank = trait.rank;
 
@@ -423,8 +506,9 @@
         nameAndAliases.insert(nameAndAliases.begin(), trait.name);
         for (const std::string &nameOrAlias : nameAndAliases) {
             bool isAlias = trait.name != nameOrAlias;
+            std::shared_ptr<Codec2Client> client;
             std::shared_ptr<Codec2Client::Interface> intf =
-                Codec2Client::CreateInterfaceByName(nameOrAlias.c_str());
+                Codec2Client::CreateInterfaceByName(nameOrAlias.c_str(), &client);
             if (!intf) {
                 ALOGD("could not create interface for %s'%s'",
                         isAlias ? "alias " : "",
@@ -618,7 +702,40 @@
                         caps->addProfileLevel(VP8ProfileMain, VP8Level_Version0);
                     }
                 }
-                addSupportedColorFormats(intf, caps.get(), trait, mediaType);
+
+                auto it = nameToPixelFormatMap.find(client->getServiceName());
+                if (it == nameToPixelFormatMap.end()) {
+                    it = nameToPixelFormatMap.try_emplace(client->getServiceName()).first;
+                    PixelFormatMap &pixelFormatMap = it->second;
+                    pixelFormatMap[HAL_PIXEL_FORMAT_YCBCR_420_888] = COLOR_FormatYUV420Flexible;
+                    pixelFormatMap[HAL_PIXEL_FORMAT_YCBCR_P010]    = COLOR_FormatYUVP010;
+                    pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_1010102]  = COLOR_Format32bitABGR2101010;
+                    pixelFormatMap[HAL_PIXEL_FORMAT_RGBA_FP16]     = COLOR_Format64bitABGRFloat;
+
+                    std::shared_ptr<C2StoreFlexiblePixelFormatDescriptorsInfo> pixelFormatInfo;
+                    std::vector<std::unique_ptr<C2Param>> heapParams;
+                    if (client->query(
+                                {},
+                                {C2StoreFlexiblePixelFormatDescriptorsInfo::PARAM_TYPE},
+                                C2_MAY_BLOCK,
+                                &heapParams) == C2_OK
+                            && heapParams.size() == 1u) {
+                        pixelFormatInfo.reset(C2StoreFlexiblePixelFormatDescriptorsInfo::From(
+                                heapParams[0].release()));
+                    }
+                    if (pixelFormatInfo && *pixelFormatInfo) {
+                        for (size_t i = 0; i < pixelFormatInfo->flexCount(); ++i) {
+                            C2FlexiblePixelFormatDescriptorStruct &desc =
+                                pixelFormatInfo->m.values[i];
+                            std::optional<int32_t> colorFormat = findFrameworkColorFormat(desc);
+                            if (colorFormat) {
+                                pixelFormatMap[desc.pixelFormat] = *colorFormat;
+                            }
+                        }
+                    }
+                }
+                addSupportedColorFormats(
+                        intf, caps.get(), trait, mediaType, it->second);
             }
         }
     }
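
The addSupportedColorFormats() change above interleaves a fixed preference list with the formats the component reports; a standalone reduction of that ordering logic, with hypothetical names, looks roughly like this:

    #include <algorithm>
    #include <list>
    #include <vector>

    std::vector<int32_t> orderColorFormats(
            std::list<int32_t> queried,                 // formats mapped from the component query
            const std::vector<int32_t> &defaults) {     // fixed preference order
        std::vector<int32_t> out;
        for (int32_t fmt : defaults) {
            out.push_back(fmt);                         // advertise preferred defaults first
            auto it = std::find(queried.begin(), queried.end(), fmt);
            if (it != queried.end()) {
                queried.erase(it);                      // do not advertise the same format twice
            }
        }
        out.insert(out.end(), queried.begin(), queried.end());  // then the remaining queried formats
        return out;
    }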
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
index af054c7..cb8b6ab 100644
--- a/media/codec2/sfplugin/FrameReassembler.cpp
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -88,8 +88,7 @@
         const sp<MediaCodecBuffer> &buffer,
         std::list<std::unique_ptr<C2Work>> *items) {
     int64_t timeUs;
-    if (buffer->size() == 0u
-            || !buffer->meta()->findInt64("timeUs", &timeUs)) {
+    if (!buffer->meta()->findInt64("timeUs", &timeUs)) {
         return C2_BAD_VALUE;
     }
 
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index 7c660dc..3615289 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -224,6 +224,17 @@
                                 Copy<C2StreamBitrateInfo::output, C2StreamBitrateInfo::input>,
                                 mInputBitrate)
                             .build());
+
+                    addParameter(
+                            DefineParam(mOutputProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                            .withDefault(new C2StreamProfileLevelInfo::output(
+                                    0u, PROFILE_UNUSED, LEVEL_UNUSED))
+                            .withFields({
+                                C2F(mOutputProfileLevel, profile).any(),
+                                C2F(mOutputProfileLevel, level).any(),
+                            })
+                            .withSetter(Setter<C2StreamProfileLevelInfo::output>)
+                            .build());
                 }
 
                 // TODO: more SDK params
@@ -241,6 +252,8 @@
             std::shared_ptr<C2StreamPixelAspectRatioInfo::output> mPixelAspectRatio;
             std::shared_ptr<C2StreamBitrateInfo::input> mInputBitrate;
             std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
+            std::shared_ptr<C2StreamProfileLevelInfo::input> mInputProfileLevel;
+            std::shared_ptr<C2StreamProfileLevelInfo::output> mOutputProfileLevel;
 
             template<typename T>
             static C2R Setter(bool, C2P<T> &) {
@@ -576,4 +589,51 @@
             << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
 }
 
+typedef std::tuple<std::string, C2Config::profile_t, int32_t> HdrProfilesParams;
+
+class HdrProfilesTest
+    : public CCodecConfigTest,
+      public ::testing::WithParamInterface<HdrProfilesParams> {
+};
+
+TEST_P(HdrProfilesTest, SetFromSdk) {
+    HdrProfilesParams params = GetParam();
+    std::string mediaType = std::get<0>(params);
+    C2Config::profile_t c2Profile = std::get<1>(params);
+    int32_t sdkProfile = std::get<2>(params);
+
+    init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, mediaType.c_str());
+
+    ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+
+    sp<AMessage> format{new AMessage};
+    format->setInt32(KEY_PROFILE, sdkProfile);
+
+    std::vector<std::unique_ptr<C2Param>> configUpdate;
+    ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+            mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+
+    ASSERT_EQ(1u, configUpdate.size());
+    C2StreamProfileLevelInfo::input *pl =
+        FindParam<std::remove_pointer<decltype(pl)>::type>(configUpdate);
+    ASSERT_NE(nullptr, pl);
+    ASSERT_EQ(c2Profile, pl->profile);
+}
+
+HdrProfilesParams kHdrProfilesParams[] = {
+    std::make_tuple(MIMETYPE_VIDEO_HEVC, PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10),
+    std::make_tuple(MIMETYPE_VIDEO_HEVC, PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus),
+    std::make_tuple(MIMETYPE_VIDEO_VP9,  PROFILE_VP9_2,        VP9Profile2HDR),
+    std::make_tuple(MIMETYPE_VIDEO_VP9,  PROFILE_VP9_2,        VP9Profile2HDR10Plus),
+    std::make_tuple(MIMETYPE_VIDEO_VP9,  PROFILE_VP9_3,        VP9Profile3HDR),
+    std::make_tuple(MIMETYPE_VIDEO_VP9,  PROFILE_VP9_3,        VP9Profile3HDR10Plus),
+    std::make_tuple(MIMETYPE_VIDEO_AV1,  PROFILE_AV1_0,        AV1ProfileMain10HDR10),
+    std::make_tuple(MIMETYPE_VIDEO_AV1,  PROFILE_AV1_0,        AV1ProfileMain10HDR10Plus),
+};
+
+INSTANTIATE_TEST_SUITE_P(
+        CCodecConfig,
+        HdrProfilesTest,
+        ::testing::ValuesIn(kHdrProfilesParams));
+
 } // namespace android
diff --git a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
index 6738ee7..0be934a 100644
--- a/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
+++ b/media/codec2/sfplugin/tests/FrameReassembler_test.cpp
@@ -53,7 +53,8 @@
             C2Config::pcm_encoding_t encoding,
             size_t inputFrameSizeInBytes,
             size_t count,
-            size_t expectedOutputSize) {
+            size_t expectedOutputSize,
+            bool separateEos) {
         FrameReassembler frameReassembler;
         frameReassembler.init(
                 mPool,
@@ -67,7 +68,7 @@
 
         size_t inputIndex = 0, outputIndex = 0;
         size_t expectCount = 0;
-        for (size_t i = 0; i < count; ++i) {
+        for (size_t i = 0; i < count + (separateEos ? 1 : 0); ++i) {
             sp<MediaCodecBuffer> buffer = new MediaCodecBuffer(
                     new AMessage, new ABuffer(inputFrameSizeInBytes));
             buffer->setRange(0, inputFrameSizeInBytes);
@@ -77,8 +78,12 @@
             if (i == count - 1) {
                 buffer->meta()->setInt32("eos", 1);
             }
-            for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
-                buffer->base()[j] = (inputIndex & 0xFF);
+            if (i == count && separateEos) {
+                buffer->setRange(0, 0);
+            } else {
+                for (size_t j = 0; j < inputFrameSizeInBytes; ++j, ++inputIndex) {
+                    buffer->base()[j] = (inputIndex & 0xFF);
+                }
             }
             std::list<std::unique_ptr<C2Work>> items;
             ASSERT_EQ(C2_OK, frameReassembler.process(buffer, &items));
@@ -105,7 +110,8 @@
                 ASSERT_EQ(encoderFrameSize * BytesPerSample(encoding), view.capacity());
                 for (size_t j = 0; j < view.capacity(); ++j, ++outputIndex) {
                     ASSERT_TRUE(outputIndex < inputIndex
-                             || inputIndex == inputFrameSizeInBytes * count);
+                             || inputIndex == inputFrameSizeInBytes * count)
+                        << "inputIndex = " << inputIndex << " outputIndex = " << outputIndex;
                     uint8_t expected = outputIndex < inputIndex ? (outputIndex & 0xFF) : 0;
                     if (expectCount < 10) {
                         ++expectCount;
@@ -137,204 +143,239 @@
 // Push frames with exactly the same size as the encoder requested.
 TEST_F(FrameReassemblerTest, PushExactFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 bytes/sample */,
-            10 /* count */,
-            10240 /* expected output size = 10 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
-            10 /* count */,
-            20480 /* expected output size = 10 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
-            10 /* count */,
-            40960 /* expected output size = 10 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1024 /* input frame size in bytes = 1024 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                10240 /* expected output size = 10 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                2048 /* input frame size in bytes = 1024 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 10 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                4096 /* input frame size in bytes = 1024 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                40960 /* expected output size = 10 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with half the size that the encoder requested.
 TEST_F(FrameReassemblerTest, PushHalfFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            512 /* input frame size in bytes = 512 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            5120 /* expected output size = 5 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            10240 /* expected output size = 5 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            20480 /* expected output size = 5 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                512 /* input frame size in bytes = 512 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                5120 /* expected output size = 5 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                1024 /* input frame size in bytes = 512 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                10240 /* expected output size = 5 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                2048 /* input frame size in bytes = 512 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 5 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with twice the size that the encoder requested.
 TEST_F(FrameReassemblerTest, PushDoubleFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            20480 /* expected output size = 20 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            40960 /* expected output size = 20 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            81920 /* expected output size = 20 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                2048 /* input frame size in bytes = 2048 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 20 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                4096 /* input frame size in bytes = 2048 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                40960 /* expected output size = 20 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                8192 /* input frame size in bytes = 2048 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                81920 /* expected output size = 20 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with a little bit larger (+5 samples) than the requested size.
 TEST_F(FrameReassemblerTest, PushLittleLargerFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            11264 /* expected output size = 11 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            22528 /* expected output size = 11 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            45056 /* expected output size = 11 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1029 /* input frame size in bytes = 1029 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                11264 /* expected output size = 11 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                2058 /* input frame size in bytes = 1029 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                22528 /* expected output size = 11 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                4116 /* input frame size in bytes = 1029 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                45056 /* expected output size = 11 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push frames with a little bit smaller (-5 samples) than the requested size.
 TEST_F(FrameReassemblerTest, PushLittleSmallerFrameSize) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 bytes per sample */,
-            10 /* count */,
-            10240 /* expected output size = 10 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes per sample */,
-            10 /* count */,
-            20480 /* expected output size = 10 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes per sample */,
-            10 /* count */,
-            40960 /* expected output size = 10 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1019 /* input frame size in bytes = 1019 samples * 1 channel * 1 bytes/sample */,
+                10 /* count */,
+                10240 /* expected output size = 10 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                2038 /* input frame size in bytes = 1019 samples * 1 channel * 2 bytes/sample */,
+                10 /* count */,
+                20480 /* expected output size = 10 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                4076 /* input frame size in bytes = 1019 samples * 1 channel * 4 bytes/sample */,
+                10 /* count */,
+                40960 /* expected output size = 10 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push single-byte frames
 TEST_F(FrameReassemblerTest, PushSingleByte) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            1 /* input frame size in bytes */,
-            100000 /* count */,
-            100352 /* expected output size = 98 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            1 /* input frame size in bytes */,
-            100000 /* count */,
-            100352 /* expected output size = 49 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            1 /* input frame size in bytes */,
-            100000 /* count */,
-            102400 /* expected output size = 25 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                1 /* input frame size in bytes */,
+                100000 /* count */,
+                100352 /* expected output size = 98 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                1 /* input frame size in bytes */,
+                100000 /* count */,
+                100352 /* expected output size = 49 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                1 /* input frame size in bytes */,
+                100000 /* count */,
+                102400 /* expected output size = 25 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 // Push one big chunk.
 TEST_F(FrameReassemblerTest, PushBigChunk) {
     ASSERT_EQ(OK, initStatus());
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_8,
-            100000 /* input frame size in bytes */,
-            1 /* count */,
-            100352 /* expected output size = 98 * 1024 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_16,
-            100000 /* input frame size in bytes */,
-            1 /* count */,
-            100352 /* expected output size = 49 * 2048 bytes/frame */);
-    testPushSameSize(
-            1024 /* frame size in samples */,
-            48000 /* sample rate */,
-            1 /* channel count */,
-            PCM_FLOAT,
-            100000 /* input frame size in bytes */,
-            1 /* count */,
-            102400 /* expected output size = 25 * 4096 bytes/frame */);
+    for (bool separateEos : {false, true}) {
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_8,
+                100000 /* input frame size in bytes */,
+                1 /* count */,
+                100352 /* expected output size = 98 * 1024 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_16,
+                100000 /* input frame size in bytes */,
+                1 /* count */,
+                100352 /* expected output size = 49 * 2048 bytes/frame */,
+                separateEos);
+        testPushSameSize(
+                1024 /* frame size in samples */,
+                48000 /* sample rate */,
+                1 /* channel count */,
+                PCM_FLOAT,
+                100000 /* input frame size in bytes */,
+                1 /* count */,
+                102400 /* expected output size = 25 * 4096 bytes/frame */,
+                separateEos);
+    }
 }
 
 } // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 5f87c66..7fc4c27 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2BufferUtils"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <libyuv.h>
 
@@ -36,8 +38,8 @@
 namespace {
 
 /**
- * A flippable, optimizable memcpy. Constructs such as (from ? src : dst) do not work as the results are
- * always const.
+ * A flippable, optimizable memcpy. Constructs such as (from ? src : dst)
+ * do not work as the results are always const.
  */
 template<bool ToA, size_t S>
 struct MemCopier {
@@ -88,7 +90,7 @@
         uint32_t planeW = img->mWidth / plane.colSampling;
         uint32_t planeH = img->mHeight / plane.rowSampling;
 
-        bool canCopyByRow = (plane.colInc == 1) && (img->mPlane[i].mColInc == 1);
+        bool canCopyByRow = (plane.colInc == bpp) && (img->mPlane[i].mColInc == bpp);
         bool canCopyByPlane = canCopyByRow && (plane.rowInc == img->mPlane[i].mRowInc);
         if (canCopyByPlane) {
             MemCopier<ToMediaImage, 0>::copy(imgRow, viewRow, plane.rowInc * planeH);
@@ -139,15 +141,18 @@
 
     if (IsNV12(view)) {
         if (IsNV12(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
             return OK;
         } else if (IsNV21(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
             if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
                                     dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else if (IsI420(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
@@ -155,15 +160,18 @@
         }
     } else if (IsNV21(view)) {
         if (IsNV12(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
             if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         } else if (IsNV21(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
             return OK;
         } else if (IsI420(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
             if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
@@ -171,22 +179,26 @@
         }
     } else if (IsI420(view)) {
         if (IsNV12(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         } else if (IsNV21(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
             if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else if (IsI420(img)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
             libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
             return OK;
         }
     }
+    ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
     return _ImageCopy<true>(view, img, imgBase);
 }
 
@@ -210,15 +222,18 @@
     int height = view.crop().height;
     if (IsNV12(img)) {
         if (IsNV12(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
             return OK;
         } else if (IsNV21(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
             if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
                                     dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else if (IsI420(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
             if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
@@ -226,15 +241,18 @@
         }
     } else if (IsNV21(img)) {
         if (IsNV12(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
             if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         } else if (IsNV21(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
             return OK;
         } else if (IsI420(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
             if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
                                     dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
                 return OK;
@@ -242,22 +260,26 @@
         }
     } else if (IsI420(img)) {
         if (IsNV12(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
             if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
                 return OK;
             }
         } else if (IsNV21(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
             if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
                                     dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
                 return OK;
             }
         } else if (IsI420(view)) {
+            ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
             libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
             libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
             libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
             return OK;
         }
     }
+    ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
     return _ImageCopy<false>(view, img, imgBase);
 }
 
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 4d939fa..3a94016 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -18,6 +18,9 @@
 #define LOG_TAG "Codec2Mapper"
 #include <utils/Log.h>
 
+#include <map>
+#include <optional>
+
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/SurfaceUtils.h>
 #include <media/stagefright/foundation/ALookup.h>
@@ -167,6 +170,9 @@
     { C2Config::LEVEL_DV_MAIN_UHD_30, DolbyVisionLevelUhd30 },
     { C2Config::LEVEL_DV_MAIN_UHD_48, DolbyVisionLevelUhd48 },
     { C2Config::LEVEL_DV_MAIN_UHD_60, DolbyVisionLevelUhd60 },
+    { C2Config::LEVEL_DV_MAIN_UHD_120, DolbyVisionLevelUhd120 },
+    { C2Config::LEVEL_DV_MAIN_8K_30,  DolbyVisionLevel8k30 },
+    { C2Config::LEVEL_DV_MAIN_8K_60,  DolbyVisionLevel8k60 },
 
     // high tiers are not yet supported on android, for now map them to main tier
     { C2Config::LEVEL_DV_HIGH_HD_24,  DolbyVisionLevelHd24 },
@@ -178,6 +184,9 @@
     { C2Config::LEVEL_DV_HIGH_UHD_30, DolbyVisionLevelUhd30 },
     { C2Config::LEVEL_DV_HIGH_UHD_48, DolbyVisionLevelUhd48 },
     { C2Config::LEVEL_DV_HIGH_UHD_60, DolbyVisionLevelUhd60 },
+    { C2Config::LEVEL_DV_HIGH_UHD_120, DolbyVisionLevelUhd120 },
+    { C2Config::LEVEL_DV_HIGH_8K_30,  DolbyVisionLevel8k30 },
+    { C2Config::LEVEL_DV_HIGH_8K_60,  DolbyVisionLevel8k60 },
 };
 
 ALookup<C2Config::profile_t, int32_t> sDolbyVisionProfiles = {
@@ -255,6 +264,8 @@
     { C2Config::PROFILE_HEVC_MAIN_STILL, HEVCProfileMainStill },
     { C2Config::PROFILE_HEVC_MAIN_INTRA, HEVCProfileMain },
     { C2Config::PROFILE_HEVC_MAIN_10_INTRA, HEVCProfileMain10 },
+    { C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10 },
+    { C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
 };
 
 ALookup<C2Config::profile_t, int32_t> sHevcHdrProfiles = {
@@ -381,15 +392,17 @@
     { C2Config::LEVEL_AV1_7_3,  AV1Level73 },
 };
 
-
 ALookup<C2Config::profile_t, int32_t> sAv1Profiles = {
-    // TODO: will need to disambiguate between Main8 and Main10
     { C2Config::PROFILE_AV1_0, AV1ProfileMain8 },
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10 },
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
 };
 
+ALookup<C2Config::profile_t, int32_t> sAv1TenbitProfiles = {
+    { C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
+};
+
 ALookup<C2Config::profile_t, int32_t> sAv1HdrProfiles = {
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10 },
 };
@@ -398,6 +411,37 @@
     { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
 };
 
+// HAL_PIXEL_FORMAT_* -> COLOR_Format*
+ALookup<uint32_t, int32_t> sPixelFormats = {
+    { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
+
+    // YCBCR_420_888 maps to YUV420Flexible and vice versa
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420Flexible },
+
+    // Fallback matches for YCBCR_420_888
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420Planar },
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420SemiPlanar },
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420PackedPlanar },
+    { HAL_PIXEL_FORMAT_YCBCR_420_888,          COLOR_FormatYUV420PackedSemiPlanar },
+
+    // Fallback matches for YUV420Flexible
+    { HAL_PIXEL_FORMAT_YCRCB_420_SP,           COLOR_FormatYUV420Flexible },
+    { HAL_PIXEL_FORMAT_YV12,                   COLOR_FormatYUV420Flexible },
+
+    { HAL_PIXEL_FORMAT_YCBCR_422_SP,           COLOR_FormatYUV422PackedSemiPlanar },
+    { HAL_PIXEL_FORMAT_YCBCR_422_I,            COLOR_FormatYUV422PackedPlanar },
+    { HAL_PIXEL_FORMAT_YCBCR_P010,             COLOR_FormatYUVP010 },
+    { HAL_PIXEL_FORMAT_RGBA_1010102,           COLOR_Format32bitABGR2101010 },
+    { HAL_PIXEL_FORMAT_RGBA_FP16,              COLOR_Format64bitABGRFloat },
+};
+
+ALookup<C2Config::picture_type_t, int32_t> sPictureType = {
+    { C2Config::picture_type_t::SYNC_FRAME,     PICTURE_TYPE_I },
+    { C2Config::picture_type_t::I_FRAME,        PICTURE_TYPE_I },
+    { C2Config::picture_type_t::P_FRAME,        PICTURE_TYPE_P },
+    { C2Config::picture_type_t::B_FRAME,        PICTURE_TYPE_B },
+};
+
 /**
  * A helper that passes through vendor extension profile and level values.
  */
@@ -603,9 +647,9 @@
 };
 
 struct Av1ProfileLevelMapper : ProfileLevelMapperHelper {
-    Av1ProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false) :
+    Av1ProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false, int32_t bitDepth = 8) :
         ProfileLevelMapperHelper(),
-        mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus) {}
+        mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus), mBitDepth(bitDepth) {}
 
     virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
         return sAv1Levels.map(from, to);
@@ -614,19 +658,22 @@
         return sAv1Levels.map(from, to);
     }
     virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
-        return mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
-                     mIsHdr ? sAv1HdrProfiles.map(from, to) :
-                              sAv1Profiles.map(from, to);
+        return (mBitDepth == 10) ? sAv1TenbitProfiles.map(from, to) :
+                    mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
+                          mIsHdr ? sAv1HdrProfiles.map(from, to) :
+                                   sAv1Profiles.map(from, to);
     }
     virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
-        return mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
-                     mIsHdr ? sAv1HdrProfiles.map(from, to) :
-                              sAv1Profiles.map(from, to);
+        return (mBitDepth == 10) ? sAv1TenbitProfiles.map(from, to) :
+                    mIsHdr10Plus ? sAv1Hdr10PlusProfiles.map(from, to) :
+                          mIsHdr ? sAv1HdrProfiles.map(from, to) :
+                                   sAv1Profiles.map(from, to);
     }
 
 private:
     bool mIsHdr;
     bool mIsHdr10Plus;
+    int32_t mBitDepth;
 };
 
 } // namespace
@@ -674,6 +721,18 @@
 }
 
 // static
+std::shared_ptr<C2Mapper::ProfileLevelMapper>
+C2Mapper::GetBitDepthProfileLevelMapper(std::string mediaType, int32_t bitDepth) {
+    std::transform(mediaType.begin(), mediaType.end(), mediaType.begin(), ::tolower);
+    if (bitDepth == 8) {
+        return GetProfileLevelMapper(mediaType);
+    } else if (mediaType == MIMETYPE_VIDEO_AV1 && bitDepth == 10) {
+        return std::make_shared<Av1ProfileLevelMapper>(false, false, bitDepth);
+    }
+    return nullptr;
+}
+
+// static
 bool C2Mapper::map(C2Config::bitrate_mode_t from, int32_t *to) {
     return sBitrateModes.map(from, to);
 }
@@ -956,41 +1015,29 @@
 // static
 bool C2Mapper::mapPixelFormatFrameworkToCodec(
         int32_t frameworkValue, uint32_t *c2Value) {
-    switch (frameworkValue) {
-        case COLOR_FormatSurface:
-            *c2Value = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-            return true;
-        case COLOR_FormatYUV420Flexible:
-        case COLOR_FormatYUV420Planar:
-        case COLOR_FormatYUV420SemiPlanar:
-        case COLOR_FormatYUV420PackedPlanar:
-        case COLOR_FormatYUV420PackedSemiPlanar:
-            *c2Value = HAL_PIXEL_FORMAT_YCBCR_420_888;
-            return true;
-        default:
-            // Passthrough
-            *c2Value = uint32_t(frameworkValue);
-            return true;
+    if (!sPixelFormats.map(frameworkValue, c2Value)) {
+        // passthrough if not mapped
+        *c2Value = uint32_t(frameworkValue);
     }
+    return true;
 }
 
 // static
 bool C2Mapper::mapPixelFormatCodecToFramework(
         uint32_t c2Value, int32_t *frameworkValue) {
-    switch (c2Value) {
-        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
-            *frameworkValue = COLOR_FormatSurface;
-            return true;
-        case HAL_PIXEL_FORMAT_YCBCR_422_SP:
-        case HAL_PIXEL_FORMAT_YCRCB_420_SP:
-        case HAL_PIXEL_FORMAT_YCBCR_422_I:
-        case HAL_PIXEL_FORMAT_YCBCR_420_888:
-        case HAL_PIXEL_FORMAT_YV12:
-            *frameworkValue = COLOR_FormatYUV420Flexible;
-            return true;
-        default:
-            // Passthrough
-            *frameworkValue = int32_t(c2Value);
-            return true;
+    if (!sPixelFormats.map(c2Value, frameworkValue)) {
+        // passthrough if not mapped
+        *frameworkValue = int32_t(c2Value);
     }
+    return true;
+}
+
+// static
+bool C2Mapper::map(C2Config::picture_type_t from, int32_t *to) {
+    return sPictureType.map(from, to);
+}
+
+// static
+bool C2Mapper::map(int32_t from, C2Config::picture_type_t *to) {
+    return sPictureType.map(from, to);
 }
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index 797c8a8..33d305e 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -43,6 +43,9 @@
         static std::shared_ptr<ProfileLevelMapper>
         GetHdrProfileLevelMapper(std::string mediaType, bool isHdr10Plus = false);
 
+        static std::shared_ptr<ProfileLevelMapper>
+        GetBitDepthProfileLevelMapper(std::string mediaType, int32_t bitDepth = 8);
+
         // convert between bitrates
         static bool map(C2Config::bitrate_mode_t, int32_t*);
         static bool map(int32_t, C2Config::bitrate_mode_t*);
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index b858fa5..68db7b2 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -39,6 +39,8 @@
 cc_test {
     name: "codec2_vndk_test",
     test_suites: ["device-tests"],
+    // This test doesn't seem to support running isolated with the current assumptions.
+    isolated: false,
 
     srcs: [
         "C2_test.cpp",
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index be81c84..27cd1f8 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -73,11 +73,12 @@
         "libbase",
         "libcutils",
         "libdl",
+        "libdmabufheap",
+        "libfmq",
+        "libgralloctypes",
         "libhardware",
         "libhidlbase",
         "libion",
-        "libdmabufheap",
-        "libfmq",
         "liblog",
         "libnativewindow",
         "libstagefright_foundation",
@@ -92,6 +93,44 @@
     ],
 }
 
+// public dependency for statically linking to libcodec2_vndk for unit tests
+cc_defaults {
+    name: "libcodec2-static-defaults",
+
+    static_libs: [
+        "liblog",
+        "libion",
+        "libfmq",
+        "libbase",
+        "libutils",
+        "libcutils",
+        "libcodec2",
+        "libhidlbase",
+        "libdmabufheap",
+        "libcodec2_vndk",
+        "libnativewindow",
+        "libcodec2_soft_common",
+        "libsfplugin_ccodec_utils",
+        "libstagefright_foundation",
+        "libstagefright_bufferpool@2.0.1",
+        "libgralloctypes",
+        "android.hardware.graphics.mapper@2.0",
+        "android.hardware.graphics.mapper@3.0",
+        "android.hardware.media.bufferpool@2.0",
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.allocator@3.0",
+        "android.hardware.graphics.bufferqueue@2.0",
+    ],
+
+    shared_libs: [
+        "libui",
+        "libdl",
+        "libhardware",
+        "libvndksupport",
+        "libprocessgroup",
+    ],
+}
+
 // public dependency for implementing Codec 2 components
 cc_defaults {
     name: "libcodec2-impl-defaults",
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 6a7f19c..bc4053d 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -20,8 +20,10 @@
 
 #include <mutex>
 
+#include <aidl/android/hardware/graphics/common/PlaneLayoutComponentType.h>
 #include <android/hardware/graphics/common/1.2/types.h>
 #include <cutils/native_handle.h>
+#include <gralloctypes/Gralloc4.h>
 #include <hardware/gralloc.h>
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
@@ -29,6 +31,7 @@
 
 #include <C2AllocatorGralloc.h>
 #include <C2Buffer.h>
+#include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
 using ::android::hardware::hidl_handle;
@@ -230,8 +233,90 @@
     }
 };
 
+static
+c2_status_t Gralloc4Mapper_lock(native_handle_t *handle, uint64_t usage, const Rect& bounds,
+        C2PlanarLayout *layout, uint8_t **addr) {
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+    std::vector<ui::PlaneLayout> planes;
+    // this method is only supported on Gralloc 4 or later
+    status_t err = mapper.getPlaneLayouts(handle, &planes);
+    if (err != NO_ERROR || planes.empty()) {
+        return C2_CANNOT_DO;
+    }
+
+    uint8_t *pointer = nullptr;
+    err = mapper.lock(handle, usage, bounds, (void **)&pointer, nullptr, nullptr);
+    if (err != NO_ERROR || pointer == nullptr) {
+        return C2_CORRUPTED;
+    }
+
+    using aidl::android::hardware::graphics::common::PlaneLayoutComponentType;
+    using aidl::android::hardware::graphics::common::PlaneLayoutComponent;
+
+    layout->type = C2PlanarLayout::TYPE_YUV;
+    layout->numPlanes = 0;
+    layout->rootPlanes = 0;
+
+    for (const ui::PlaneLayout &plane : planes) {
+        layout->rootPlanes++;
+        uint32_t lastOffsetInBits = 0;
+        uint32_t rootIx = layout->numPlanes;
+
+        for (const PlaneLayoutComponent &component : plane.components) {
+            if (!gralloc4::isStandardPlaneLayoutComponentType(component.type)) {
+                mapper.unlock(handle);
+                return C2_CANNOT_DO;
+            }
+
+            uint32_t rightShiftBits = component.offsetInBits - lastOffsetInBits;
+            uint32_t allocatedDepthInBits = component.sizeInBits + rightShiftBits;
+            C2PlanarLayout::plane_index_t planeId;
+            C2PlaneInfo::channel_t channel;
+
+            switch (static_cast<PlaneLayoutComponentType>(component.type.value)) {
+                case PlaneLayoutComponentType::Y:
+                    planeId = C2PlanarLayout::PLANE_Y;
+                    channel = C2PlaneInfo::CHANNEL_Y;
+                    break;
+                case PlaneLayoutComponentType::CB:
+                    planeId = C2PlanarLayout::PLANE_U;
+                    channel = C2PlaneInfo::CHANNEL_CB;
+                    break;
+                case PlaneLayoutComponentType::CR:
+                    planeId = C2PlanarLayout::PLANE_V;
+                    channel = C2PlaneInfo::CHANNEL_CR;
+                    break;
+                default:
+                    mapper.unlock(handle);
+                    return C2_CORRUPTED;
+            }
+
+            addr[planeId] = pointer + plane.offsetInBytes + (component.offsetInBits / 8);
+            layout->planes[planeId] = {
+                channel,                                                // channel
+                static_cast<int32_t>(plane.sampleIncrementInBits / 8),  // colInc
+                static_cast<int32_t>(plane.strideInBytes),              // rowInc
+                static_cast<uint32_t>(plane.horizontalSubsampling),     // mColSampling
+                static_cast<uint32_t>(plane.verticalSubsampling),       // mRowSampling
+                allocatedDepthInBits,                                   // allocatedDepth (bits)
+                static_cast<uint32_t>(component.sizeInBits),            // bitDepth (bits)
+                rightShiftBits,                                         // rightShift (bits)
+                C2PlaneInfo::NATIVE,                                    // endianness
+                rootIx,                                                 // rootIx
+                static_cast<uint32_t>(component.offsetInBits / 8),      // offset (bytes)
+            };
+
+            layout->numPlanes++;
+            lastOffsetInBits = component.offsetInBits + component.sizeInBits;
+        }
+    }
+    return C2_OK;
+}
+
 } // unnamed namespace
 
+
 native_handle_t *UnwrapNativeCodec2GrallocHandle(const C2Handle *const handle) {
     return C2HandleGralloc::UnwrapNativeHandle(handle);
 }
@@ -385,6 +470,10 @@
                 mBuffer, mWidth, mHeight, mFormat, mGrallocUsage,
                 mStride, generation, igbp_id, igbp_slot);
     }
+
+    // 'NATIVE' on Android means LITTLE_ENDIAN
+    constexpr C2PlaneInfo::endianness_t kEndianness = C2PlaneInfo::NATIVE;
+
     switch (mFormat) {
         case static_cast<uint32_t>(PixelFormat4::RGBA_1010102): {
             // TRICKY: this is used for media as YUV444 in the case when it is queued directly to a
@@ -609,17 +698,6 @@
                 C2PlanarLayout::PLANE_V,          // rootIx
                 0,                                // offset
             };
-            // handle interleaved formats
-            intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
-            if (uvOffset > 0 && uvOffset < (intptr_t)ycbcrLayout.chroma_step) {
-                layout->rootPlanes = 2;
-                layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
-                layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
-            } else if (uvOffset < 0 && uvOffset > -(intptr_t)ycbcrLayout.chroma_step) {
-                layout->rootPlanes = 2;
-                layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
-                layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
-            }
             break;
         }
 
@@ -646,7 +724,7 @@
                 16,                             // allocatedDepth
                 10,                             // bitDepth
                 6,                              // rightShift
-                C2PlaneInfo::LITTLE_END,        // endianness
+                kEndianness,                    // endianness
                 C2PlanarLayout::PLANE_Y,        // rootIx
                 0,                              // offset
             };
@@ -659,7 +737,7 @@
                 16,                             // allocatedDepth
                 10,                             // bitDepth
                 6,                              // rightShift
-                C2PlaneInfo::LITTLE_END,        // endianness
+                kEndianness,                    // endianness
                 C2PlanarLayout::PLANE_U,        // rootIx
                 0,                              // offset
             };
@@ -672,7 +750,7 @@
                 16,                             // allocatedDepth
                 10,                             // bitDepth
                 6,                              // rightShift
-                C2PlaneInfo::LITTLE_END,        // endianness
+                kEndianness,                    // endianness
                 C2PlanarLayout::PLANE_U,        // rootIx
                 2,                              // offset
             };
@@ -680,9 +758,15 @@
         }
 
         default: {
-            // We don't know what it is, but let's try to lock it.
+            // We don't know what it is, so let's try to lock it with gralloc4
             android_ycbcr ycbcrLayout;
+            c2_status_t status = Gralloc4Mapper_lock(
+                    const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, layout, addr);
+            if (status == C2_OK) {
+                break;
+            }
 
+            // fallback to lockYCbCr
             status_t err = GraphicBufferMapper::get().lockYCbCr(
                     const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, &ycbcrLayout);
             if (err == OK && ycbcrLayout.y && ycbcrLayout.cb && ycbcrLayout.cr
@@ -734,17 +818,6 @@
                     C2PlanarLayout::PLANE_V,          // rootIx
                     0,                                // offset
                 };
-                // handle interleaved formats
-                intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
-                if (uvOffset > 0 && uvOffset < (intptr_t)ycbcrLayout.chroma_step) {
-                    layout->rootPlanes = 2;
-                    layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
-                    layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
-                } else if (uvOffset < 0 && uvOffset > -(intptr_t)ycbcrLayout.chroma_step) {
-                    layout->rootPlanes = 2;
-                    layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
-                    layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
-                }
                 break;
             }
 
@@ -790,6 +863,29 @@
     }
     mLocked = true;
 
+    // handle interleaved formats
+    if (layout->type == C2PlanarLayout::TYPE_YUV && layout->rootPlanes == 3) {
+        intptr_t uvOffset = addr[C2PlanarLayout::PLANE_V] - addr[C2PlanarLayout::PLANE_U];
+        intptr_t uvColInc = layout->planes[C2PlanarLayout::PLANE_U].colInc;
+        if (uvOffset > 0 && uvOffset < uvColInc) {
+            layout->rootPlanes = 2;
+            layout->planes[C2PlanarLayout::PLANE_V].rootIx = C2PlanarLayout::PLANE_U;
+            layout->planes[C2PlanarLayout::PLANE_V].offset = uvOffset;
+        } else if (uvOffset < 0 && uvOffset > -uvColInc) {
+            layout->rootPlanes = 2;
+            layout->planes[C2PlanarLayout::PLANE_U].rootIx = C2PlanarLayout::PLANE_V;
+            layout->planes[C2PlanarLayout::PLANE_U].offset = -uvOffset;
+        }
+    }
+
+    ALOGV("C2AllocationGralloc::map: layout: type=%d numPlanes=%d rootPlanes=%d",
+          layout->type, layout->numPlanes, layout->rootPlanes);
+    for (int i = 0; i < layout->numPlanes; ++i) {
+        const C2PlaneInfo &plane = layout->planes[i];
+        ALOGV("C2AllocationGralloc::map: plane[%d]: colInc=%d rowInc=%d rootIx=%u offset=%u",
+              i, plane.colInc, plane.rowInc, plane.rootIx, plane.offset);
+    }
+
     return C2_OK;
 }
 
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 77b265a..7b593ee 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -207,6 +207,7 @@
 
         c2_status_t err = mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
         if (map.addr) {
+            std::lock_guard<std::mutex> guard(mMutexMappings);
             mMappings.push_back(map);
         }
         return err;
@@ -217,22 +218,26 @@
             ALOGD("tried to unmap unmapped buffer");
             return C2_NOT_FOUND;
         }
-        for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
-            if (addr != (uint8_t *)it->addr + it->alignmentBytes ||
-                    size + it->alignmentBytes != it->size) {
-                continue;
+        { // Scope for the lock_guard of mMutexMappings.
+            std::lock_guard<std::mutex> guard(mMutexMappings);
+            for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
+                if (addr != (uint8_t *)it->addr + it->alignmentBytes ||
+                        size + it->alignmentBytes != it->size) {
+                    continue;
+                }
+                int err = munmap(it->addr, it->size);
+                if (err != 0) {
+                    ALOGD("munmap failed");
+                    return c2_map_errno<EINVAL>(errno);
+                }
+                if (fence) {
+                    *fence = C2Fence(); // not using fences
+                }
+                (void)mMappings.erase(it);
+                ALOGV("successfully unmapped: addr=%p size=%zu fd=%d", addr, size,
+                          mHandle.bufferFd());
+                return C2_OK;
             }
-            int err = munmap(it->addr, it->size);
-            if (err != 0) {
-                ALOGD("munmap failed");
-                return c2_map_errno<EINVAL>(errno);
-            }
-            if (fence) {
-                *fence = C2Fence(); // not using fences
-            }
-            (void)mMappings.erase(it);
-            ALOGV("successfully unmapped: addr=%p size=%zu fd=%d", addr, size, mHandle.bufferFd());
-            return C2_OK;
         }
         ALOGD("unmap failed to find specified map");
         return C2_BAD_VALUE;
@@ -241,6 +246,7 @@
     virtual ~Impl() {
         if (!mMappings.empty()) {
             ALOGD("Dangling mappings!");
+            std::lock_guard<std::mutex> guard(mMutexMappings);
             for (const Mapping &map : mMappings) {
                 (void)munmap(map.addr, map.size);
             }
@@ -320,6 +326,7 @@
         size_t size;
     };
     std::list<Mapping> mMappings;
+    std::mutex mMutexMappings;
 };
 
 class C2AllocationIon::ImplV2 : public C2AllocationIon::Impl {
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index 1660c38..dfdd84d 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -301,13 +301,21 @@
         std::lock_guard<std::mutex> lock(_mComponentStoreReadLock);
         _mComponentStore = store;
     }
-    std::shared_ptr<C2AllocatorIon> allocator;
+    std::shared_ptr<C2AllocatorIon> ionAllocator;
     {
         std::lock_guard<std::mutex> lock(gIonAllocatorMutex);
-        allocator = gIonAllocator.lock();
+        ionAllocator = gIonAllocator.lock();
     }
-    if (allocator) {
-        UseComponentStoreForIonAllocator(allocator, store);
+    if (ionAllocator) {
+        UseComponentStoreForIonAllocator(ionAllocator, store);
+    }
+    std::shared_ptr<C2DmaBufAllocator> dmaAllocator;
+    {
+        std::lock_guard<std::mutex> lock(gDmaBufAllocatorMutex);
+        dmaAllocator = gDmaBufAllocator.lock();
+    }
+    if (dmaAllocator) {
+        UseComponentStoreForDmaBufAllocator(dmaAllocator, store);
     }
 }
 
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index b2636e9..bec978a 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -101,6 +101,8 @@
             uint32_t generationId,
             uint64_t consumerUsage);
 
+    virtual void getConsumerUsage(uint64_t *consumerUsage);
+
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
@@ -138,7 +140,6 @@
                 uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
                 android::sp<android::GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
                 std::shared_ptr<C2SurfaceSyncMemory> syncMem);
-
 private:
     friend struct _C2BlockFactory;
 
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 169de0c..63b0f39 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -16,6 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2BqBuffer"
+#include <android/hardware_buffer.h>
 #include <utils/Log.h>
 
 #include <ui/BufferQueueDefs.h>
@@ -171,6 +172,91 @@
     return stamp;
 }
 
+// Do not rely on the AHardwareBuffer module for GraphicBuffer handling, since the AHardwareBuffer
+// module is linked to the framework, which could have a different implementation of GraphicBuffer
+// than the mainline/vndk implementation. (See b/203347494.)
+//
+// b2h/h2b between HardwareBuffer and GraphicBuffer cannot be used. (b2h/h2b depend on the
+// AHardwareBuffer module for the conversion between HardwareBuffer and GraphicBuffer.)
+// hgbp_-prefixed methods are added to be used instead of b2h/h2b.
+//
+// TODO: Remove the dependency on the existing AHwB module. Also clean up conversions (the
+// conversions here and the h2b/b2h conversions).
+const GraphicBuffer* hgbp_AHBuffer_to_GraphicBuffer(const AHardwareBuffer* buffer) {
+    return GraphicBuffer::fromAHardwareBuffer(buffer);
+}
+
+int hgbp_createFromHandle(const AHardwareBuffer_Desc* desc,
+                                     const native_handle_t* handle,
+                                     sp<GraphicBuffer> *outBuffer) {
+
+    if (!desc || !handle || !outBuffer) return ::android::BAD_VALUE;
+    if (desc->rfu0 != 0 || desc->rfu1 != 0) return ::android::BAD_VALUE;
+    if (desc->format == AHARDWAREBUFFER_FORMAT_BLOB && desc->height != 1)
+        return ::android::BAD_VALUE;
+
+    const int format = uint32_t(desc->format);
+    const uint64_t usage = uint64_t(desc->usage);
+    sp<GraphicBuffer> gbuffer(new GraphicBuffer(handle,
+                                                GraphicBuffer::HandleWrapMethod::CLONE_HANDLE,
+                                                desc->width, desc->height,
+                                                format, desc->layers, usage, desc->stride));
+    status_t err = gbuffer->initCheck();
+    if (err != 0 || gbuffer->handle == 0) return err;
+
+    *outBuffer = gbuffer;
+
+    return ::android::NO_ERROR;
+}
+
+void hgbp_describe(const AHardwareBuffer* buffer,
+        AHardwareBuffer_Desc* outDesc) {
+    if (!buffer || !outDesc) return;
+
+    const GraphicBuffer* gbuffer = hgbp_AHBuffer_to_GraphicBuffer(buffer);
+
+    outDesc->width = gbuffer->getWidth();
+    outDesc->height = gbuffer->getHeight();
+    outDesc->layers = gbuffer->getLayerCount();
+    outDesc->format = uint32_t(gbuffer->getPixelFormat());
+    outDesc->usage = uint64_t(gbuffer->getUsage());
+    outDesc->stride = gbuffer->getStride();
+    outDesc->rfu0 = 0;
+    outDesc->rfu1 = 0;
+}
+
+
+bool hgbp_h2b(HBuffer const& from, sp<GraphicBuffer>* to) {
+    AHardwareBuffer_Desc const* desc =
+            reinterpret_cast<AHardwareBuffer_Desc const*>(
+            from.description.data());
+    native_handle_t const* handle = from.nativeHandle;
+    if (hgbp_createFromHandle(desc, handle, to) != ::android::OK) {
+        return false;
+    }
+    return true;
+}
+
+bool hgbp_b2h(sp<GraphicBuffer> const& from, HBuffer* to,
+         uint32_t* toGenerationNumber) {
+    if (!from) {
+        return false;
+    }
+    AHardwareBuffer* hwBuffer = from->toAHardwareBuffer();
+    to->nativeHandle.setTo(
+          const_cast<native_handle_t*>(from->handle),
+          false);
+    hgbp_describe(
+            hwBuffer,
+            reinterpret_cast<AHardwareBuffer_Desc*>(to->description.data()));
+    if (toGenerationNumber) {
+        *toGenerationNumber = from->getGenerationNumber();
+    }
+    return true;
+}
+
+// End of hgbp methods for GraphicBuffer creation.
+
 bool getGenerationNumberAndUsage(const sp<HGraphicBufferProducer> &producer,
                                  uint32_t *generation, uint64_t *usage) {
     status_t status{};
@@ -211,7 +297,7 @@
                     HBuffer const& hBuffer,
                     uint32_t generationNumber){
                 if (h2b(hStatus, &status) &&
-                        h2b(hBuffer, &slotBuffer) &&
+                        hgbp_h2b(hBuffer, &slotBuffer) &&
                         slotBuffer) {
                     *generation = generationNumber;
                     *usage = slotBuffer->getUsage();
@@ -402,7 +488,7 @@
                             HBuffer const& hBuffer,
                             uint32_t generationNumber){
                         if (h2b(hStatus, &status) &&
-                                h2b(hBuffer, &slotBuffer) &&
+                                hgbp_h2b(hBuffer, &slotBuffer) &&
                                 slotBuffer) {
                             slotBuffer->setGenerationNumber(generationNumber);
                             outGeneration = generationNumber;
@@ -485,19 +571,12 @@
 public:
     Impl(const std::shared_ptr<C2Allocator> &allocator)
         : mInit(C2_OK), mProducerId(0), mGeneration(0),
-          mDqFailure(0), mLastDqTs(0), mLastDqLogTs(0),
-          mAllocator(allocator) {
+          mConsumerUsage(0), mDqFailure(0), mLastDqTs(0),
+          mLastDqLogTs(0), mAllocator(allocator) {
     }
 
     ~Impl() {
-        bool noInit = false;
         for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
-            if (!noInit && mProducer) {
-                Return<HStatus> transResult =
-                        mProducer->detachBuffer(static_cast<int32_t>(i));
-                noInit = !transResult.isOk() ||
-                         static_cast<HStatus>(transResult) == HStatus::NO_INIT;
-            }
             mBuffers[i].clear();
         }
     }
@@ -606,15 +685,6 @@
         {
             sp<GraphicBuffer> buffers[NUM_BUFFER_SLOTS];
             std::scoped_lock<std::mutex> lock(mMutex);
-            bool noInit = false;
-            for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
-                if (!noInit && mProducer) {
-                    Return<HStatus> transResult =
-                            mProducer->detachBuffer(static_cast<int32_t>(i));
-                    noInit = !transResult.isOk() ||
-                             static_cast<HStatus>(transResult) == HStatus::NO_INIT;
-                }
-            }
             int32_t oldGeneration = mGeneration;
             if (producer) {
                 mProducer = producer;
@@ -661,6 +731,11 @@
                   "bqId: %llu migrated buffers # %d",
                   generation, (unsigned long long)producerId, migrated);
         }
+        mConsumerUsage = usage;
+    }
+
+    void getConsumerUsage(uint64_t *consumeUsage) {
+        *consumeUsage = mConsumerUsage;
     }
 
 private:
@@ -669,6 +744,7 @@
     c2_status_t mInit;
     uint64_t mProducerId;
     uint32_t mGeneration;
+    uint64_t mConsumerUsage;
     OnRenderCallback mRenderCallback;
 
     size_t mDqFailure;
@@ -804,7 +880,7 @@
 
     HBuffer hBuffer{};
     uint32_t hGenerationNumber{};
-    if (!b2h(graphicBuffer, &hBuffer, &hGenerationNumber)) {
+    if (!hgbp_b2h(graphicBuffer, &hBuffer, &hGenerationNumber)) {
         ALOGD("I to O conversion failed");
         return -1;
     }
@@ -1000,3 +1076,10 @@
         mImpl->setRenderCallback(renderCallback);
     }
 }
+
+void C2BufferQueueBlockPool::getConsumerUsage(uint64_t *consumeUsage) {
+    if (mImpl) {
+        mImpl->getConsumerUsage(consumeUsage);
+    }
+}
+
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index e55bdc0..2115cc3 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -228,10 +228,10 @@
     tv.tv_nsec = timeoutNs % 1000000000;
 
     int ret =  syscall(__NR_futex, &mCond, FUTEX_WAIT, waitId, &tv, NULL, 0);
-    if (ret == 0 || ret == EAGAIN) {
+    if (ret == 0 || errno == EAGAIN) {
         return C2_OK;
     }
-    if (ret == EINTR || ret == ETIMEDOUT) {
+    if (errno == EINTR || errno == ETIMEDOUT) {
         return C2_TIMED_OUT;
     }
     return C2_BAD_VALUE;
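
The hunk above corrects the error handling after the FUTEX_WAIT syscall: the raw syscall returns -1 on failure and reports the reason through errno, so comparing the return value itself against EAGAIN, EINTR, or ETIMEDOUT could never match. A minimal standalone sketch of the corrected pattern, using a hypothetical wrapper rather than the C2SyncVariables code:

#include <cerrno>
#include <cstdint>
#include <ctime>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

// Waits on *addr while it still holds 'expected'. Returns 0 on wakeup (or if the
// value already changed), the errno value on interrupt/timeout, and -1 otherwise.
static int futex_wait(volatile int32_t* addr, int32_t expected, const timespec* timeout) {
    long ret = syscall(__NR_futex, addr, FUTEX_WAIT, expected, timeout, nullptr, 0);
    if (ret == 0) {
        return 0;                      // woken by FUTEX_WAKE
    }
    switch (errno) {                   // the syscall returned -1; errno holds the reason
        case EAGAIN:    return 0;      // *addr no longer matched 'expected'
        case EINTR:
        case ETIMEDOUT: return errno;  // caller decides whether to retry or give up
        default:        return -1;     // genuine failure
    }
}
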
diff --git a/media/codecs/m4v_h263/dec/src/vop.cpp b/media/codecs/m4v_h263/dec/src/vop.cpp
index 7b32498..abc0861 100644
--- a/media/codecs/m4v_h263/dec/src/vop.cpp
+++ b/media/codecs/m4v_h263/dec/src/vop.cpp
@@ -107,26 +107,57 @@
 #ifndef PV_TOLERATE_VOL_ERRORS
         if (layer)                                                      /*    */
         {
-            /* support SSPL0-2  */
-            if (tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
-                    tmpvar != 0xA1 && tmpvar != 0xA2  && tmpvar != 0xA3/* Core SP@L1-L3 */)
-                return PV_FAIL;
+            switch (tmpvar)
+            {
+                /* Simple Scalable Profile Levels */
+                case 0x10:
+                case 0x11:
+                case 0x12:
+                /* Core Scalable Profile Levels */
+                case 0xA1:
+                case 0xA2:
+                case 0xA3:
+                    // Do nothing; the cases listed above are supported values
+                    break;
+                default:
+                    // Unsupported profile level
+                    return PV_FAIL;
+            }
         }
         else
         {
-            /* support SPL0-3 & SSPL0-2   */
-            if (tmpvar != 0x01 && tmpvar != 0x02 && tmpvar != 0x03 && tmpvar != 0x08 &&
-                    /* While not technically supported, try to decode SPL4&SPL5 files as well. */
-                    /* We'll fail later if the size is too large.  This is to allow playback of */
-                    /* some <=CIF files generated by other encoders. */
-                    tmpvar != 0x04 && tmpvar != 0x05 &&
-                    tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
-                    tmpvar != 0x21 && tmpvar != 0x22 &&  /* Core Profile Levels */
-                    tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3 &&
-                    tmpvar != 0xF0 && tmpvar != 0xF1 && /* Advanced Simple Profile Levels*/
-                    tmpvar != 0xF2 && tmpvar != 0xF3 &&
-                    tmpvar != 0xF4 && tmpvar != 0xF5)
-                return PV_FAIL;
+            switch (tmpvar)
+            {
+                /* Simple Profile Levels */
+                case 0x01:
+                case 0x02:
+                case 0x03:
+                case 0x04:
+                case 0x05:
+                case 0x06:
+                case 0x08:
+                case 0x10:
+                case 0x11:
+                case 0x12:
+                /* Core Profile Levels */
+                case 0x21:
+                case 0x22:
+                case 0xA1:
+                case 0xA2:
+                case 0xA3:
+                /* Advanced Simple Profile Levels*/
+                case 0xF0:
+                case 0xF1:
+                case 0xF2:
+                case 0xF3:
+                case 0xF4:
+                case 0xF5:
+                    // Do nothing; the cases listed above are supported values
+                    break;
+                default:
+                    // Unsupported profile level
+                    return PV_FAIL;
+            }
         }
 #else
         profile = tmpvar;
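
The two switch statements above spell out exactly which MPEG-4 profile/level indication codes the decoder accepts. As a purely illustrative alternative (the patch itself keeps the switch, and the helper name below is made up), the same base-layer whitelist could be expressed as a table lookup:

#include <algorithm>
#include <cstdint>
#include <iterator>

// Same codes as the layer == 0 switch above, just table-driven.
static bool isSupportedBaseLayerProfileLevel(uint32_t code) {
    static const uint32_t kSupported[] = {
        0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x08,  // Simple Profile levels
        0x10, 0x11, 0x12,                          // Simple Scalable Profile levels
        0x21, 0x22, 0xA1, 0xA2, 0xA3,              // Core / Core Scalable Profile levels
        0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5,        // Advanced Simple Profile levels
    };
    return std::find(std::begin(kSupported), std::end(kSupported), code)
            != std::end(kSupported);
}
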
diff --git a/media/codecs/m4v_h263/enc/src/fastidct.cpp b/media/codecs/m4v_h263/enc/src/fastidct.cpp
index 688effc..ec1b28f 100644
--- a/media/codecs/m4v_h263/enc/src/fastidct.cpp
+++ b/media/codecs/m4v_h263/enc/src/fastidct.cpp
@@ -76,6 +76,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col2(Short *blk)
 {
     int32 x0, x1, x3, x5, x7;//, x8;
@@ -102,6 +104,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col3(Short *blk)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -137,6 +141,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col4(Short *blk)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -180,6 +186,8 @@
 }
 
 #ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col0x40(Short *blk)
 {
     int32 x1, x3, x5, x7;//, x8;
@@ -230,6 +238,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col0x10(Short *blk)
 {
     int32 x1, x3, x5,  x7;
@@ -256,6 +266,8 @@
 
 #endif /* SMALL_DCT */
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_col(Short *blk)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -368,6 +380,8 @@
     return;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row2Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x4, x5;
@@ -427,6 +441,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row3Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -497,6 +513,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row4Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -573,6 +591,8 @@
 }
 
 #ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x40Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x1, x2, x4, x5;
@@ -686,6 +706,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x10Inter(Short *blk, UChar *rec, Int lx)
 {
     int32 x1, x3, x5, x7;
@@ -741,6 +763,8 @@
 
 #endif /* SMALL_DCT */
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_rowInter(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -864,6 +888,8 @@
     return;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row2Intra(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x4, x5;
@@ -919,6 +945,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row3Intra(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -985,6 +1013,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row4Intra(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1058,6 +1088,8 @@
 }
 
 #ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x40Intra(Short *blk, UChar *rec, Int lx)
 {
     int32  x1, x2, x4, x5;
@@ -1166,6 +1198,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x10Intra(Short *blk, UChar *rec, Int lx)
 {
     int32 x1, x3, x5, x7;
@@ -1218,6 +1252,8 @@
 }
 
 #endif /* SMALL_DCT */
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_rowIntra(Short *blk, UChar *rec, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1364,6 +1400,8 @@
     return;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row2zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x0, x1, x2, x4, x5;
@@ -1424,6 +1462,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row3zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1495,6 +1535,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row4zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
@@ -1572,6 +1614,8 @@
 }
 
 #ifndef SMALL_DCT
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x40zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x1, x2, x4, x5;
@@ -1687,6 +1731,8 @@
     return ;
 }
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_row0x10zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x1, x3, x5, x7;
@@ -1743,6 +1789,8 @@
 
 #endif /* SMALL_DCT */
 
+/* Ignoring overflows as idct function expects and uses overflows */
+__attribute__((no_sanitize("signed-integer-overflow")))
 void idct_rowzmv(Short *blk, UChar *rec, UChar *pred, Int lx)
 {
     int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
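
Each of these annotations tells UBSan that signed wraparound inside the corresponding IDCT routine is intentional, so the sanitizer stops aborting there while the rest of the build stays instrumented. A minimal sketch of the same technique, applied to a made-up function rather than the codec's:

#include <cstdint>

// Overflow checking is suppressed for this one function only; every other
// function in the translation unit remains sanitized.
__attribute__((no_sanitize("signed-integer-overflow")))
static int32_t wrapping_add(int32_t a, int32_t b) {
    return a + b;  // may wrap past INT32_MAX by design
}
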
diff --git a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
index 00b2ab6..b295258 100644
--- a/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
+++ b/media/codecs/m4v_h263/enc/src/mp4enc_api.cpp
@@ -501,13 +501,16 @@
     /* check frame rate */
     for (i = 0; i < encParams->nLayers; i++)
     {
+        if (encOption->encFrameRate[i] <= 0. || encOption->encFrameRate[i] > 120)
+        {
+            goto CLEAN_UP;
+        }
         encParams->LayerFrameRate[i] = encOption->encFrameRate[i];
     }
 
     if (encParams->nLayers > 1)
     {
-        if (encOption->encFrameRate[0] == encOption->encFrameRate[1] ||
-                encOption->encFrameRate[0] == 0. || encOption->encFrameRate[1] == 0.) /* 7/31/03 */
+        if (encOption->encFrameRate[0] == encOption->encFrameRate[1])
             goto CLEAN_UP;
     }
     /* set max frame rate */
diff --git a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
index 912c821..5e613d9 100644
--- a/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
+++ b/media/codecs/m4v_h263/fuzzer/mpeg4_h263_dec_fuzzer.cpp
@@ -50,7 +50,7 @@
 
  private:
   tagvideoDecControls *mDecHandle = nullptr;
-  uint8_t *mOutputBuffer[kNumOutputBuffers];
+  uint8_t *mOutputBuffer[kNumOutputBuffers] = {};
   bool mInitialized = false;
   bool mFramesConfigured = false;
 #ifdef MPEG4
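
The empty-brace initializer added above value-initializes the whole array of output-buffer pointers, so the fuzzer never frees or dereferences an indeterminate pointer when a run aborts before allocation. A small sketch of the difference, with made-up names:

#include <cstdint>

struct Decoder {
    uint8_t* uninitialized[2];   // indeterminate when default-initialized
    uint8_t* zeroed[2] = {};     // value-initialized: both slots start as nullptr
};

// With 'zeroed', this cleanup is safe even if allocation never happened;
// with 'uninitialized', deleting the garbage pointers would be undefined.
inline void release(uint8_t* (&bufs)[2]) {
    for (uint8_t*& b : bufs) {
        delete[] b;              // deleting nullptr is a no-op
        b = nullptr;
    }
}
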
diff --git a/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
index 4338c43..c04f7f3 100644
--- a/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
+++ b/media/codecs/mp3dec/src/pvmp3_stereo_proc.cpp
@@ -219,6 +219,9 @@
 ; FUNCTION CODE
 ----------------------------------------------------------------------------*/
 
+#if __has_attribute(no_sanitize)
+__attribute__((no_sanitize("integer")))
+#endif
 void pvmp3_st_intensity(int32 xr[SUBBANDS_NUMBER*FILTERBANK_BANDS],
                         int32 xl[SUBBANDS_NUMBER*FILTERBANK_BANDS],
                         int32 is_pos,
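
Wrapping the attribute in a __has_attribute check keeps the file compilable on toolchains that do not know no_sanitize at all. One compact way to package that guard, shown only as a sketch (the macro name is invented for the example):

// Expands to nothing on compilers without the no_sanitize attribute.
#if defined(__has_attribute)
#  if __has_attribute(no_sanitize)
#    define EXAMPLE_NO_SANITIZE(check) __attribute__((no_sanitize(check)))
#  endif
#endif
#ifndef EXAMPLE_NO_SANITIZE
#  define EXAMPLE_NO_SANITIZE(check)
#endif

EXAMPLE_NO_SANITIZE("integer")
static unsigned wrap_sum(unsigned a, unsigned b) {
    return a + b;  // wraparound is intended; the integer sanitizer would flag it otherwise
}
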
diff --git a/media/extractors/flac/Android.bp b/media/extractors/flac/Android.bp
index 9a2a76b..f6ce969 100644
--- a/media/extractors/flac/Android.bp
+++ b/media/extractors/flac/Android.bp
@@ -19,7 +19,7 @@
 
 cc_library {
     name: "libflacextractor",
-    defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
+    defaults: ["extractor-defaults"],
 
     srcs: ["FLACExtractor.cpp"],
 
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 8836c47..eccbf46 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1132,10 +1132,10 @@
                         && size >= 5) {
                         const uint8_t *ptr = (const uint8_t *)data;
                         const uint8_t profile = ptr[2] >> 1;
-                        const uint8_t bl_compatibility_id = (ptr[4]) >> 4;
+                        const uint8_t blCompatibilityId = (ptr[4]) >> 4;
                         bool create_two_tracks = false;
 
-                        if (bl_compatibility_id && bl_compatibility_id != 15) {
+                        if (blCompatibilityId && blCompatibilityId != 15) {
                             create_two_tracks = true;
                         }
 
@@ -1147,13 +1147,15 @@
 
                             track_b->timescale = mLastTrack->timescale;
                             track_b->sampleTable = mLastTrack->sampleTable;
-                            track_b->includes_expensive_metadata = mLastTrack->includes_expensive_metadata;
+                            track_b->includes_expensive_metadata =
+                                mLastTrack->includes_expensive_metadata;
                             track_b->skipTrack = mLastTrack->skipTrack;
                             track_b->elst_needs_processing = mLastTrack->elst_needs_processing;
                             track_b->elst_media_time = mLastTrack->elst_media_time;
                             track_b->elst_segment_duration = mLastTrack->elst_segment_duration;
                             track_b->elst_shift_start_ticks = mLastTrack->elst_shift_start_ticks;
-                            track_b->elst_initial_empty_edit_ticks = mLastTrack->elst_initial_empty_edit_ticks;
+                            track_b->elst_initial_empty_edit_ticks =
+                                mLastTrack->elst_initial_empty_edit_ticks;
                             track_b->subsample_encryption = mLastTrack->subsample_encryption;
 
                             track_b->mTx3gBuffer = mLastTrack->mTx3gBuffer;
@@ -2591,9 +2593,11 @@
             *offset += chunk_size;
             break;
         }
-        case FOURCC("dvcC"):
-        case FOURCC("dvvC"): {
 
+        case FOURCC("dvcC"):
+        case FOURCC("dvvC"):
+        case FOURCC("dvwC"):
+        {
             if (chunk_data_size != 24) {
                 return ERROR_MALFORMED;
             }
@@ -2613,13 +2617,14 @@
                 return ERROR_MALFORMED;
 
             AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
-                                   buffer.get(), chunk_data_size);
+                                    buffer.get(), chunk_data_size);
             AMediaFormat_setString(mLastTrack->meta, AMEDIAFORMAT_KEY_MIME,
                                    MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
 
             *offset += chunk_size;
             break;
         }
+
         case FOURCC("d263"):
         {
             *offset += chunk_size;
@@ -3480,7 +3485,7 @@
         }
         unsigned mask = br.getBits(8);
         for (unsigned i = 0; i < 8; i++) {
-            if (((0x1 << i) && mask) == 0)
+            if (((0x1 << i) & mask) == 0)
                 continue;
 
             if (br.numBitsLeft() < 8) {
@@ -4458,7 +4463,6 @@
     if (!AMediaFormat_getString(track->meta, AMEDIAFORMAT_KEY_MIME, &mime)) {
         return NULL;
     }
-
     sp<ItemTable> itemTable;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         void *data;
@@ -4491,14 +4495,14 @@
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
         void *data;
         size_t size;
-        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)) {
+        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)
+                || size != 24) {
             return NULL;
         }
 
         const uint8_t *ptr = (const uint8_t *)data;
-
         // dv_major.dv_minor Should be 1.0 or 2.1
-        if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
+        if ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)) {
             return NULL;
         }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
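
One hunk above replaces a logical AND with a bitwise AND when walking the bits of a mask: (0x1 << i) && mask is true for every i as long as mask is non-zero, whereas (0x1 << i) & mask actually isolates bit i. A tiny standalone illustration:

#include <cassert>

int main() {
    unsigned mask = 0x04;                      // only bit 2 set

    // Bitwise AND tests the individual bit.
    assert(((0x1u << 2) & mask) != 0);
    assert(((0x1u << 5) & mask) == 0);

    // Logical AND collapses both operands to booleans, so this "test"
    // succeeds for every bit position whenever mask is non-zero.
    assert(((0x1u << 5) && mask) == true);
    return 0;
}
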
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index 7e6247b..4f0796e 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -61,6 +61,7 @@
         "libhidlbase",
         "libhidlmemory",
         "libjsoncpp",
+        "libmedia_helper",
         "libprocessgroup",
         "libstagefright_esds",
         "libstagefright_foundation_without_imemory",
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
index 5d97d9a..23c74f7 100644
--- a/media/extractors/tests/Android.bp
+++ b/media/extractors/tests/Android.bp
@@ -45,14 +45,11 @@
         "libdatasource",
         "libwatchdog",
 
-        "libstagefright",
         "libstagefright_id3",
         "libstagefright_flacdec",
         "libstagefright_esds",
         "libstagefright_mpeg2support",
-        "libstagefright_mpeg2extractor",
         "libstagefright_foundation_colorutils_ndk",
-        "libstagefright_foundation",
         "libstagefright_metadatautils",
 
         "libmedia_midiiowrapper",
@@ -74,6 +71,8 @@
         "libcutils",
         "libmediandk",
         "libmedia",
+        "libstagefright",
+        "libstagefright_foundation",
         "libcrypto",
         "libhidlmemory",
         "libhidlbase",
diff --git a/media/extractors/wav/Android.bp b/media/extractors/wav/Android.bp
index cc5e1c7..76546b8 100644
--- a/media/extractors/wav/Android.bp
+++ b/media/extractors/wav/Android.bp
@@ -18,7 +18,7 @@
 cc_library {
     name: "libwavextractor",
 
-    defaults: ["extractor-defaults", "libbinder_ndk_host_user"],
+    defaults: ["extractor-defaults"],
 
     srcs: ["WAVExtractor.cpp"],
 
diff --git a/media/janitors/codec_OWNERS b/media/janitors/codec_OWNERS
index e201399..d4ee51b 100644
--- a/media/janitors/codec_OWNERS
+++ b/media/janitors/codec_OWNERS
@@ -2,4 +2,4 @@
 # differentiated from plugins connecting those codecs to either omx or codec2 infrastructure
 essick@google.com
 lajos@google.com
-marcone@google.com
+wonsik@google.com
diff --git a/media/janitors/media_leads_OWNERS b/media/janitors/media_leads_OWNERS
new file mode 100644
index 0000000..b7dbdee
--- /dev/null
+++ b/media/janitors/media_leads_OWNERS
@@ -0,0 +1,9 @@
+# gerrit owner/approvers corresponding to the TLs within the media team
+# loosely (as of 2022/3) fgoldfain@ and direct reports
+arifdikici@google.com
+elaurent@google.com
+fgoldfain@google.com    #{LAST_RESORT_SUGGESTION}
+lajos@google.com
+nchalko@google.com
+olly@google.com
+robertshih@google.com
diff --git a/media/janitors/reliability_mainline_OWNERS b/media/janitors/reliability_mainline_OWNERS
new file mode 100644
index 0000000..cced19c
--- /dev/null
+++ b/media/janitors/reliability_mainline_OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 1051309
+# go/android-media-reliability
+
+essick@google.com
+nchalko@google.com
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 212a787..4ebb530 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -40,7 +40,7 @@
 /**
  * This is used to represent a value that has not been specified.
  * For example, an application could use {@link #AAUDIO_UNSPECIFIED} to indicate
- * that is did not not care what the specific value of a parameter was
+ * that it did not care what the specific value of a parameter was
  * and would accept whatever it was given.
  */
 #define AAUDIO_UNSPECIFIED           0
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 73432af..06f05b0 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -602,6 +602,7 @@
 
 void AudioStream::setDuckAndMuteVolume(float duckAndMuteVolume) {
     ALOGD("%s() to %f", __func__, duckAndMuteVolume);
+    std::lock_guard<std::mutex> lock(mStreamLock);
     mDuckAndMuteVolume = duckAndMuteVolume;
     doSetVolume(); // apply this change
 }
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index acab774..a9ac3d9 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -520,6 +520,8 @@
             return AUDIO_INPUT_FLAG_HW_AV_SYNC;
         case media::AudioInputFlags::DIRECT:
             return AUDIO_INPUT_FLAG_DIRECT;
+        case media::AudioInputFlags::ULTRASOUND:
+            return AUDIO_INPUT_FLAG_ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
@@ -545,6 +547,8 @@
             return media::AudioInputFlags::HW_AV_SYNC;
         case AUDIO_INPUT_FLAG_DIRECT:
             return media::AudioInputFlags::DIRECT;
+        case AUDIO_INPUT_FLAG_ULTRASOUND:
+            return media::AudioInputFlags::ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
@@ -584,6 +588,10 @@
             return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
         case media::AudioOutputFlags::GAPLESS_OFFLOAD:
             return AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD;
+        case media::AudioOutputFlags::SPATIALIZER:
+            return AUDIO_OUTPUT_FLAG_SPATIALIZER;
+        case media::AudioOutputFlags::ULTRASOUND:
+            return AUDIO_OUTPUT_FLAG_ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
@@ -625,6 +633,10 @@
             return media::AudioOutputFlags::INCALL_MUSIC;
         case AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD:
             return media::AudioOutputFlags::GAPLESS_OFFLOAD;
+        case AUDIO_OUTPUT_FLAG_SPATIALIZER:
+            return media::AudioOutputFlags::SPATIALIZER;
+        case AUDIO_OUTPUT_FLAG_ULTRASOUND:
+            return media::AudioOutputFlags::ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
@@ -850,6 +862,8 @@
             return AUDIO_SOURCE_FM_TUNER;
         case media::AudioSourceType::HOTWORD:
             return AUDIO_SOURCE_HOTWORD;
+        case media::AudioSourceType::ULTRASOUND:
+            return AUDIO_SOURCE_ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
@@ -887,6 +901,8 @@
             return media::AudioSourceType::FM_TUNER;
         case AUDIO_SOURCE_HOTWORD:
             return media::AudioSourceType::HOTWORD;
+        case AUDIO_SOURCE_ULTRASOUND:
+            return media::AudioSourceType::ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
@@ -1207,6 +1223,8 @@
             return AUDIO_CONTENT_TYPE_MOVIE;
         case media::AudioContentType::SONIFICATION:
             return AUDIO_CONTENT_TYPE_SONIFICATION;
+        case media::AudioContentType::ULTRASOUND:
+            return AUDIO_CONTENT_TYPE_ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
@@ -1224,6 +1242,8 @@
             return media::AudioContentType::MOVIE;
         case AUDIO_CONTENT_TYPE_SONIFICATION:
             return media::AudioContentType::SONIFICATION;
+        case AUDIO_CONTENT_TYPE_ULTRASOUND:
+            return media::AudioContentType::ULTRASOUND;
     }
     return unexpected(BAD_VALUE);
 }
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 3c3d340..a00cb79 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -41,7 +41,7 @@
     },
 }
 
-cc_library_shared {
+cc_library {
     name: "libaudiopolicy",
     srcs: [
         "AudioAttributes.cpp",
@@ -136,6 +136,7 @@
         "libutils",
         "libvibrator",
         "framework-permission-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
     export_shared_lib_headers: [
         "audioflinger-aidl-cpp",
@@ -201,7 +202,7 @@
     ],
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth.updatable",
+        "com.android.bluetooth",
         "com.android.media",
         "com.android.media.swcodec",
     ],
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 9091599..8c645c3 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -94,7 +94,7 @@
         return NO_INIT;
     }
 
-    if (type == NULL && uuid == NULL) {
+    if (type == nullptr && uuid == nullptr) {
         ALOGW("Must specify at least type or uuid");
         return BAD_VALUE;
     }
@@ -105,8 +105,8 @@
     mSessionId = sessionId;
 
     memset(&mDescriptor, 0, sizeof(effect_descriptor_t));
-    mDescriptor.type = *(type != NULL ? type : EFFECT_UUID_NULL);
-    mDescriptor.uuid = *(uuid != NULL ? uuid : EFFECT_UUID_NULL);
+    mDescriptor.type = *(type != nullptr ? type : EFFECT_UUID_NULL);
+    mDescriptor.uuid = *(uuid != nullptr ? uuid : EFFECT_UUID_NULL);
 
     // TODO b/182392769: use attribution source util
     mIEffectClient = new EffectClient(this);
@@ -228,7 +228,7 @@
             AudioSystem::releaseAudioSessionId(mSessionId,
                 VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid)));
         }
-        if (mIEffect != NULL) {
+        if (mIEffect != nullptr) {
             mIEffect->disconnect();
             IInterface::asBinder(mIEffect)->unlinkToDeath(mIEffectClient);
         }
@@ -306,7 +306,7 @@
         if (mEnabled == (cmdCode == EFFECT_CMD_ENABLE)) {
             return NO_ERROR;
         }
-        if (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL) {
+        if (replySize == nullptr || *replySize != sizeof(status_t) || replyData == nullptr) {
             return BAD_VALUE;
         }
         mLock.lock();
@@ -349,7 +349,7 @@
         return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
     }
 
-    if (param == NULL || param->psize == 0 || param->vsize == 0) {
+    if (param == nullptr || param->psize == 0 || param->vsize == 0) {
         return BAD_VALUE;
     }
 
@@ -384,8 +384,7 @@
     if (mStatus != NO_ERROR) {
         return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
     }
-
-    if (param == NULL || param->psize == 0 || param->vsize == 0) {
+    if (param == nullptr || param->psize == 0 || param->vsize == 0) {
         return BAD_VALUE;
     }
 
@@ -440,8 +439,7 @@
     if (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS) {
         return mStatus;
     }
-
-    if (param == NULL || param->psize == 0 || param->vsize == 0) {
+    if (param == nullptr || param->psize == 0 || param->vsize == 0) {
         return BAD_VALUE;
     }
 
@@ -537,6 +535,9 @@
 
 status_t AudioEffect::queryNumberEffects(uint32_t *numEffects)
 {
+    if (numEffects == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->queryNumberEffects(numEffects);
@@ -544,6 +545,9 @@
 
 status_t AudioEffect::queryEffect(uint32_t index, effect_descriptor_t *descriptor)
 {
+    if (descriptor == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->queryEffect(index, descriptor);
@@ -554,6 +558,9 @@
                                           uint32_t preferredTypeFlag,
                                           effect_descriptor_t *descriptor)
 {
+    if (uuid == nullptr || type == nullptr || descriptor == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     return af->getEffectDescriptor(uuid, type, preferredTypeFlag, descriptor);
@@ -584,6 +591,9 @@
 
 status_t AudioEffect::newEffectUniqueId(audio_unique_id_t* id)
 {
+    if (id == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     *id = af->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
@@ -597,14 +607,15 @@
                                              audio_source_t source,
                                              audio_unique_id_t *id)
 {
+    if ((typeStr == nullptr && uuidStr == nullptr) || id == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    if (typeStr == NULL && uuidStr == NULL) return BAD_VALUE;
-
     // Convert type & uuid from string to effect_uuid_t.
     effect_uuid_t type;
-    if (typeStr != NULL) {
+    if (typeStr != nullptr) {
         status_t res = stringToGuid(typeStr, &type);
         if (res != OK) return res;
     } else {
@@ -612,7 +623,7 @@
     }
 
     effect_uuid_t uuid;
-    if (uuidStr != NULL) {
+    if (uuidStr != nullptr) {
         status_t res = stringToGuid(uuidStr, &uuid);
         if (res != OK) return res;
     } else {
@@ -640,14 +651,15 @@
                                              audio_usage_t usage,
                                              audio_unique_id_t *id)
 {
+    if ((typeStr == nullptr && uuidStr == nullptr) || id == nullptr) {
+        return BAD_VALUE;
+    }
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
-    if (typeStr == NULL && uuidStr == NULL) return BAD_VALUE;
-
     // Convert type & uuid from string to effect_uuid_t.
     effect_uuid_t type;
-    if (typeStr != NULL) {
+    if (typeStr != nullptr) {
         status_t res = stringToGuid(typeStr, &type);
         if (res != OK) return res;
     } else {
@@ -655,7 +667,7 @@
     }
 
     effect_uuid_t uuid;
-    if (uuidStr != NULL) {
+    if (uuidStr != nullptr) {
         status_t res = stringToGuid(uuidStr, &uuid);
         if (res != OK) return res;
     } else {
@@ -698,7 +710,7 @@
 
 status_t AudioEffect::stringToGuid(const char *str, effect_uuid_t *guid)
 {
-    if (str == NULL || guid == NULL) {
+    if (str == nullptr || guid == nullptr) {
         return BAD_VALUE;
     }
 
@@ -724,7 +736,7 @@
 
 status_t AudioEffect::guidToString(const effect_uuid_t *guid, char *str, size_t maxLen)
 {
-    if (guid == NULL || str == NULL) {
+    if (guid == nullptr || str == nullptr) {
         return BAD_VALUE;
     }
 
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index da21771..22f0295 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -205,16 +205,18 @@
     // Otherwise the callback thread will never exit.
     stop();
     if (mAudioRecordThread != 0) {
-        mProxy->interrupt();
         mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
+        mProxy->interrupt();
         mAudioRecordThread->requestExitAndWait();
         mAudioRecordThread.clear();
     }
-    // No lock here: worst case we remove a NULL callback which will be a nop
+
+    AutoMutex lock(mLock);
     if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) {
         // This may not stop all of these device callbacks!
         // TODO: Add some sort of protection.
         AudioSystem::removeAudioDeviceCallback(this, mInput, mPortId);
+        mDeviceCallback.clear();
     }
 }
 status_t AudioRecord::set(
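
The destructor change above reorders shutdown so the worker thread is asked to exit before it is woken, and it now holds mLock while clearing the device callback instead of relying on a benign race. A minimal sketch of that shutdown ordering using standard-library primitives; the class and member names are made up, and the real code waits on a shared-memory proxy rather than a condition variable:

#include <condition_variable>
#include <mutex>
#include <thread>

class Worker {
public:
    Worker() : mThread([this] { run(); }) {}
    ~Worker() {
        {
            std::lock_guard<std::mutex> lock(mMutex);
            mExitRequested = true;   // analogue of requestExit()
        }
        mCv.notify_all();            // analogue of mProxy->interrupt()
        mThread.join();              // analogue of requestExitAndWait()
    }
private:
    void run() {
        std::unique_lock<std::mutex> lock(mMutex);
        while (!mExitRequested) {
            mCv.wait(lock);          // the exit flag is re-checked after every wakeup
        }
    }
    std::mutex mMutex;
    std::condition_variable mCv;
    bool mExitRequested = false;
    std::thread mThread;             // declared last so it starts after the flag exists
};
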
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index c7967e5..139d931 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1966,8 +1966,8 @@
     return result.value_or(false);
 }
 
-status_t AudioSystem::getHwOffloadEncodingFormatsSupportedForA2DP(
-        std::vector<audio_format_t>* formats) {
+status_t AudioSystem::getHwOffloadFormatsSupportedForBluetoothMedia(
+        audio_devices_t device, std::vector<audio_format_t>* formats) {
     if (formats == nullptr) {
         return BAD_VALUE;
     }
@@ -1977,8 +1977,10 @@
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<media::audio::common::AudioFormat> formatsAidl;
+    int32_t deviceAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_devices_t_int32_t(device));
+
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-            aps->getHwOffloadEncodingFormatsSupportedForA2DP(&formatsAidl)));
+            aps->getHwOffloadFormatsSupportedForBluetoothMedia(deviceAidl, &formatsAidl)));
     *formats = VALUE_OR_RETURN_STATUS(
             convertContainer<std::vector<audio_format_t>>(formatsAidl,
                                                           aidl2legacy_AudioFormat_audio_format_t));
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index c2bea66..81aa823 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -357,12 +357,13 @@
     // Otherwise the callback thread will never exit.
     stop();
     if (mAudioTrackThread != 0) { // not thread safe
-        mProxy->interrupt();
         mAudioTrackThread->requestExit();   // see comment in AudioTrack.h
+        mProxy->interrupt();
         mAudioTrackThread->requestExitAndWait();
         mAudioTrackThread.clear();
     }
-    // No lock here: worst case we remove a NULL callback which will be a nop
+
+    AutoMutex lock(mLock);
     if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) {
         // This may not stop all of these device callbacks!
         // TODO: Add some sort of protection.
@@ -571,11 +572,13 @@
     //  (b) we can support re-creation of offloaded tracks
     if (offloadInfo != NULL) {
         mOffloadInfoCopy = *offloadInfo;
-        mOffloadInfo = &mOffloadInfoCopy;
     } else {
-        mOffloadInfo = NULL;
         memset(&mOffloadInfoCopy, 0, sizeof(audio_offload_info_t));
         mOffloadInfoCopy = AUDIO_INFO_INITIALIZER;
+        mOffloadInfoCopy.format = format;
+        mOffloadInfoCopy.sample_rate = sampleRate;
+        mOffloadInfoCopy.channel_mask = channelMask;
+        mOffloadInfoCopy.stream_type = streamType;
     }
 
     mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index 35719be..e3b79b2 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -409,7 +409,7 @@
         android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         // it seems that a FUTEX_WAKE_PRIVATE will not wake a FUTEX_WAIT, even within same process
         (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
-                1);
+                INT_MAX);
     }
 }
 
@@ -419,7 +419,7 @@
     if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->mFlags) & CBLK_INTERRUPT)) {
         android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
-                1);
+                INT_MAX);
     }
 }
 
@@ -490,6 +490,8 @@
 status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *requested)
 {
     struct timespec total;          // total elapsed time spent waiting
+    struct timespec before;
+    bool beforeIsValid = false;
     total.tv_sec = 0;
     total.tv_nsec = 0;
     audio_track_cblk_t* cblk = mCblk;
@@ -570,17 +572,38 @@
         }
         int32_t old = android_atomic_and(~CBLK_FUTEX_WAKE, &cblk->mFutex);
         if (!(old & CBLK_FUTEX_WAKE)) {
+            if (!beforeIsValid) {
+                clock_gettime(CLOCK_MONOTONIC, &before);
+                beforeIsValid = true;
+            }
             errno = 0;
             (void) syscall(__NR_futex, &cblk->mFutex,
                     mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT, old & ~CBLK_FUTEX_WAKE, ts);
-            switch (errno) {
+            status_t error = errno; // clock_gettime can affect errno
+            {
+                struct timespec after;
+                clock_gettime(CLOCK_MONOTONIC, &after);
+                total.tv_sec += after.tv_sec - before.tv_sec;
+                // Use auto instead of long to avoid the google-runtime-int warning.
+                auto deltaNs = after.tv_nsec - before.tv_nsec;
+                if (deltaNs < 0) {
+                    deltaNs += 1000000000;
+                    total.tv_sec--;
+                }
+                if ((total.tv_nsec += deltaNs) >= 1000000000) {
+                    total.tv_nsec -= 1000000000;
+                    total.tv_sec++;
+                }
+                before = after;
+            }
+            switch (error) {
             case 0:            // normal wakeup by server, or by binderDied()
             case EWOULDBLOCK:  // benign race condition with server
             case EINTR:        // wait was interrupted by signal or other spurious wakeup
             case ETIMEDOUT:    // time-out expired
                 break;
             default:
-                status = errno;
+                status = error;
                 ALOGE("%s unexpected error %s", __func__, strerror(status));
                 goto end;
             }
@@ -747,7 +770,7 @@
             int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
             if (!(old & CBLK_FUTEX_WAKE)) {
                 (void) syscall(__NR_futex, &cblk->mFutex,
-                        mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
+                        mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, INT_MAX);
             }
         }
         mFlushed += (newFront - front) & mask;
@@ -917,7 +940,7 @@
         int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
         if (!(old & CBLK_FUTEX_WAKE)) {
             (void) syscall(__NR_futex, &cblk->mFutex,
-                    mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
+                    mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, INT_MAX);
         }
     }
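
waitStreamEndDone now charges the time actually spent blocked in FUTEX_WAIT against the caller's timeout, which means adding two timespec values with a carry on the nanosecond field, and it snapshots errno before calling clock_gettime, which may overwrite it. A minimal sketch of that normalization, independent of the proxy classes (the helper name is made up):

#include <ctime>

// Adds (end - start) to *total, keeping tv_nsec within [0, 1e9).
// Assumes end >= start on the same monotonic clock.
static void accumulateElapsed(timespec* total, const timespec& start, const timespec& end) {
    total->tv_sec += end.tv_sec - start.tv_sec;
    long deltaNs = end.tv_nsec - start.tv_nsec;
    if (deltaNs < 0) {                 // borrow one second
        deltaNs += 1000000000L;
        total->tv_sec--;
    }
    total->tv_nsec += deltaNs;
    if (total->tv_nsec >= 1000000000L) {
        total->tv_nsec -= 1000000000L;
        total->tv_sec++;
    }
}
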
 
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 2af1c50..504e4f8 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -765,6 +765,12 @@
     return statusTFromBinderStatus(mDelegate->updateSecondaryOutputs(trackSecondaryOutputInfos));
 }
 
+status_t AudioFlingerClientAdapter::setDeviceConnectedState(
+        const struct audio_port_v7 *port, bool connected) {
+    media::AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_port_v7_AudioPort(*port));
+    return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, connected));
+}
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
@@ -1236,4 +1242,10 @@
     return Status::fromStatusT(mDelegate->updateSecondaryOutputs(trackSecondaryOutputs));
 }
 
+Status AudioFlingerServerAdapter::setDeviceConnectedState(
+        const media::AudioPort& port, bool connected) {
+    audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
+    return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, connected));
+}
+
 } // namespace android
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 25fdb49..dcfde8b 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -26,16 +26,6 @@
 
 using base::unexpected;
 
-ConversionResult<volume_group_t>
-aidl2legacy_int32_t_volume_group_t(int32_t aidl) {
-    return convertReinterpret<volume_group_t>(aidl);
-}
-
-ConversionResult<int32_t>
-legacy2aidl_volume_group_t_int32_t(volume_group_t legacy) {
-    return convertReinterpret<int32_t>(legacy);
-}
-
 ConversionResult<uint32_t>
 aidl2legacy_AudioMixType_uint32_t(media::AudioMixType aidl) {
     switch (aidl) {
diff --git a/media/libaudioclient/aidl/android/media/AudioContentType.aidl b/media/libaudioclient/aidl/android/media/AudioContentType.aidl
index f734fba..a7d3277 100644
--- a/media/libaudioclient/aidl/android/media/AudioContentType.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioContentType.aidl
@@ -22,4 +22,5 @@
     MUSIC = 2,
     MOVIE = 3,
     SONIFICATION = 4,
+    ULTRASOUND = 1997,
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
index bfc0eb0..d79769c 100644
--- a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
@@ -28,4 +28,5 @@
     VOIP_TX    = 5,
     HW_AV_SYNC = 6,
     DIRECT     = 7,
+    ULTRASOUND = 8,
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
index cebd8f0..f49b24c 100644
--- a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
@@ -36,4 +36,6 @@
     VOIP_RX          = 13,
     INCALL_MUSIC     = 14,
     GAPLESS_OFFLOAD  = 15,
+    SPATIALIZER      = 16,
+    ULTRASOUND       = 17,
 }
diff --git a/media/libaudioclient/aidl/android/media/AudioSourceType.aidl b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
index 8673b92..2006e6c 100644
--- a/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
@@ -40,4 +40,5 @@
      * Used only internally by the framework.
      */
     HOTWORD = 1999,
+    ULTRASOUND = 2000,
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 7ffcc33..5cdde5d 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -216,4 +216,6 @@
     // This usually happens when there is a dynamic policy registered.
     void updateSecondaryOutputs(
             in TrackSecondaryOutputInfo[] trackSecondaryOutputInfos);
+
+    void setDeviceConnectedState(in AudioPort devicePort, boolean connected);
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 5f0a1de..6140a64 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -295,7 +295,7 @@
     void getReportedSurroundFormats(inout Int count,
                                     out AudioFormat[] formats);
 
-    AudioFormat[] getHwOffloadEncodingFormatsSupportedForA2DP();
+    AudioFormat[] getHwOffloadFormatsSupportedForBluetoothMedia(int /* audio_devices_t */ device);
 
     void setSurroundFormatEnabled(AudioFormat audioFormat, boolean enabled);
 
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index dd4d2da..02ff43f 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -136,7 +136,7 @@
      *                      indicated by count.
      *      PERMISSION_DENIED could not get AudioFlinger interface
      *      NO_INIT         effect library failed to initialize
-     *      BAD_VALUE       invalid audio session or descriptor pointers
+     *      BAD_VALUE       invalid audio session, or invalid descriptor or count pointers
      *
      * Returned value
      *   *descriptor updated with descriptors of pre processings enabled by default
@@ -160,6 +160,7 @@
      *      NO_ERROR        successful operation.
      *      PERMISSION_DENIED could not get AudioFlinger interface
      *                        or caller lacks required permissions.
+     *      BAD_VALUE       invalid pointer to id
      * Returned value
      *   *id:  The new unique system-wide effect id.
      */
@@ -194,7 +195,7 @@
      *      PERMISSION_DENIED could not get AudioFlinger interface
      *                        or caller lacks required permissions.
      *      NO_INIT         effect library failed to initialize.
-     *      BAD_VALUE       invalid source, type uuid or implementation uuid.
+     *      BAD_VALUE       invalid source, type uuid or implementation uuid, or id pointer
      *      NAME_NOT_FOUND  no effect with this uuid or type found.
      *
      * Returned value
@@ -233,7 +234,7 @@
      *      PERMISSION_DENIED could not get AudioFlinger interface
      *                        or caller lacks required permissions.
      *      NO_INIT         effect library failed to initialize.
-     *      BAD_VALUE       invalid type uuid or implementation uuid.
+     *      BAD_VALUE       invalid type uuid or implementation uuid, or id pointer
      *      NAME_NOT_FOUND  no effect with this uuid or type found.
      *
      * Returned value
@@ -455,7 +456,7 @@
      * Returned status (from utils/Errors.h) can be:
      *  - NO_ERROR: successful operation.
      *  - INVALID_OPERATION: the application does not have control of the effect engine.
-     *  - BAD_VALUE: invalid parameter identifier or value.
+     *  - BAD_VALUE: invalid parameter structure pointer, or invalid identifier or value.
      *  - DEAD_OBJECT: the effect engine has been deleted.
      */
      virtual status_t   setParameter(effect_param_t *param);
@@ -500,7 +501,7 @@
      * Returned status (from utils/Errors.h) can be:
      *  - NO_ERROR: successful operation.
      *  - INVALID_OPERATION: the AudioEffect was not successfully initialized.
-     *  - BAD_VALUE: invalid parameter identifier.
+     *  - BAD_VALUE: invalid parameter structure pointer, or invalid parameter identifier.
      *  - DEAD_OBJECT: the effect engine has been deleted.
      */
      virtual status_t   getParameter(effect_param_t *param);
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 869bd6e..4d85f7a 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -420,8 +420,8 @@
 
     static status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
 
-    static status_t getHwOffloadEncodingFormatsSupportedForA2DP(
-                                    std::vector<audio_format_t> *formats);
+    static status_t getHwOffloadFormatsSupportedForBluetoothMedia(
+                                    audio_devices_t device, std::vector<audio_format_t> *formats);
 
     // numSurroundFormats holds the maximum number of formats and bool value allowed in the array.
     // When numSurroundFormats is 0, surroundFormats and surroundFormatsEnabled will not be
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index fa21265..285a28a 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1182,7 +1182,6 @@
     sp<IMemory>             mSharedBuffer;
     transfer_type           mTransfer;
     audio_offload_info_t    mOffloadInfoCopy;
-    const audio_offload_info_t* mOffloadInfo;
     audio_attributes_t      mAttributes;
 
     size_t                  mFrameSize;             // frame size in bytes
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 9e5019e..6d4ab8e 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -347,6 +347,8 @@
 
     virtual status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
+
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
 };
 
 /**
@@ -443,6 +445,7 @@
     status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
     status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
+    status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -528,6 +531,7 @@
             SET_AUDIO_HAL_PIDS = media::BnAudioFlingerService::TRANSACTION_setAudioHalPids,
             SET_VIBRATOR_INFOS = media::BnAudioFlingerService::TRANSACTION_setVibratorInfos,
             UPDATE_SECONDARY_OUTPUTS = media::BnAudioFlingerService::TRANSACTION_updateSecondaryOutputs,
+            SET_DEVICE_CONNECTED_STATE = media::BnAudioFlingerService::TRANSACTION_setDeviceConnectedState,
         };
 
         /**
@@ -637,6 +641,7 @@
     Status setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos) override;
     Status updateSecondaryOutputs(
             const std::vector<media::TrackSecondaryOutputInfo>& trackSecondaryOutputInfos) override;
+    Status setDeviceConnectedState(const media::AudioPort& port, bool connected) override;
 
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
diff --git a/media/libaudioclient/include/media/PolicyAidlConversion.h b/media/libaudioclient/include/media/PolicyAidlConversion.h
index 873f27a..2cfa438 100644
--- a/media/libaudioclient/include/media/PolicyAidlConversion.h
+++ b/media/libaudioclient/include/media/PolicyAidlConversion.h
@@ -39,11 +39,6 @@
 
 namespace android {
 
-ConversionResult<volume_group_t>
-aidl2legacy_int32_t_volume_group_t(int32_t aidl);
-ConversionResult<int32_t>
-legacy2aidl_volume_group_t_int32_t(volume_group_t legacy);
-
 ConversionResult<product_strategy_t>
 aidl2legacy_int32_t_product_strategy_t(int32_t aidl);
 ConversionResult<int32_t>
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index 31257d5..3df9378 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -63,6 +63,13 @@
     return audioDeviceInAllUsbSet;
 }
 
+const DeviceTypeSet& getAudioDeviceOutAllBleSet() {
+    static const DeviceTypeSet audioDeviceOutAllBleSet = DeviceTypeSet(
+            std::begin(AUDIO_DEVICE_OUT_ALL_BLE_ARRAY),
+            std::end(AUDIO_DEVICE_OUT_ALL_BLE_ARRAY));
+    return audioDeviceOutAllBleSet;
+}
+
 bool deviceTypesToString(const DeviceTypeSet &deviceTypes, std::string &str) {
     if (deviceTypes.empty()) {
         str = "Empty device types";
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 204b365..60b42fb 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -40,6 +40,7 @@
 const DeviceTypeSet& getAudioDeviceOutAllUsbSet();
 const DeviceTypeSet& getAudioDeviceInAllSet();
 const DeviceTypeSet& getAudioDeviceInAllUsbSet();
+const DeviceTypeSet& getAudioDeviceOutAllBleSet();
 
 template<typename T>
 static std::vector<T> Intersection(const std::set<T>& a, const std::set<T>& b) {
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index bd24c84..b32c735 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -27,6 +27,7 @@
         "libaudiohal@5.0",
         "libaudiohal@6.0",
         "libaudiohal@7.0",
+        "libaudiohal@7.1",
     ],
 
     shared_libs: [
diff --git a/media/libaudiohal/FactoryHalHidl.cpp b/media/libaudiohal/FactoryHalHidl.cpp
index e420d07..804edcc 100644
--- a/media/libaudiohal/FactoryHalHidl.cpp
+++ b/media/libaudiohal/FactoryHalHidl.cpp
@@ -31,6 +31,7 @@
 /** Supported HAL versions, in order of preference.
  */
 const char* sAudioHALVersions[] = {
+    "7.1",
     "7.0",
     "6.0",
     "5.0",
@@ -94,7 +95,7 @@
 }  // namespace
 
 void* createPreferredImpl(const std::string& package, const std::string& interface) {
-    for (auto version = detail::sAudioHALVersions; version != nullptr; ++version) {
+    for (auto version = detail::sAudioHALVersions; *version != nullptr; ++version) {
         void* rawInterface = nullptr;
         if (hasHalService(package, *version, interface)
                 && createHalService(*version, interface, &rawInterface)) {
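
The loop fix above matters because sAudioHALVersions is a null-terminated array of C strings: the terminating condition has to dereference the iterator (*version), since the pointer-to-pointer itself never becomes null and the old loop would have walked past the sentinel. A self-contained sketch of iterating such an array; the list below merely mirrors its shape:

#include <cstdio>

// Null-terminated list of candidate versions, newest first.
static const char* const kVersions[] = {"7.1", "7.0", "6.0", "5.0", "4.0", nullptr};

int main() {
    // 'p' itself is never null; the sentinel is the element it points to.
    for (const char* const* p = kVersions; *p != nullptr; ++p) {
        std::printf("candidate HAL version: %s\n", *p);
    }
    return 0;
}
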
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index a2c6e8a..e9c8723 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -7,22 +7,33 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_defaults {
-    name: "libaudiohal_default",
-
+filegroup {
+    name: "audio_core_hal_client_sources",
     srcs: [
         "DeviceHalLocal.cpp",
         "DevicesFactoryHalHybrid.cpp",
         "DevicesFactoryHalLocal.cpp",
-        "StreamHalLocal.cpp",
-
-        "ConversionHelperHidl.cpp",
         "DeviceHalHidl.cpp",
         "DevicesFactoryHalHidl.cpp",
+        "StreamHalLocal.cpp",
+        "StreamHalHidl.cpp",
+    ],
+}
+
+filegroup {
+    name: "audio_effect_hal_client_sources",
+    srcs: [
         "EffectBufferHalHidl.cpp",
         "EffectHalHidl.cpp",
         "EffectsFactoryHalHidl.cpp",
-        "StreamHalHidl.cpp",
+    ],
+}
+
+cc_defaults {
+    name: "libaudiohal_default",
+
+    srcs: [
+        "ConversionHelperHidl.cpp",
     ],
 
     cflags: [
@@ -65,6 +76,10 @@
 cc_library_shared {
     name: "libaudiohal@4.0",
     defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_core_hal_client_sources",
+        ":audio_effect_hal_client_sources",
+    ],
     shared_libs: [
         "android.hardware.audio.common@4.0",
         "android.hardware.audio.common@4.0-util",
@@ -83,6 +98,10 @@
 cc_library_shared {
     name: "libaudiohal@5.0",
     defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_core_hal_client_sources",
+        ":audio_effect_hal_client_sources",
+    ],
     shared_libs: [
         "android.hardware.audio.common@5.0",
         "android.hardware.audio.common@5.0-util",
@@ -101,6 +120,10 @@
 cc_library_shared {
     name: "libaudiohal@6.0",
     defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_core_hal_client_sources",
+        ":audio_effect_hal_client_sources",
+    ],
     shared_libs: [
         "android.hardware.audio.common@6.0",
         "android.hardware.audio.common@6.0-util",
@@ -119,6 +142,10 @@
 cc_library_shared {
     name: "libaudiohal@7.0",
     defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_core_hal_client_sources",
+        ":audio_effect_hal_client_sources",
+    ],
     shared_libs: [
         "android.hardware.audio.common@7.0",
         "android.hardware.audio.common@7.0-util",
@@ -133,3 +160,25 @@
         "-include common/all-versions/VersionMacro.h",
     ]
 }
+
+cc_library_shared {
+    name: "libaudiohal@7.1",
+    defaults: ["libaudiohal_default"],
+    srcs: [
+        ":audio_core_hal_client_sources",
+    ],
+    shared_libs: [
+        "android.hardware.audio.common@7.0",
+        "android.hardware.audio.common@7.1-util",
+        "android.hardware.audio@7.0",
+        "android.hardware.audio@7.1",
+        "android.hardware.audio@7.1-util",
+    ],
+    cflags: [
+        "-DMAJOR_VERSION=7",
+        "-DMINOR_VERSION=1",
+        "-DCOMMON_TYPES_MINOR_VERSION=0",
+        "-DCORE_TYPES_MINOR_VERSION=0",
+        "-include common/all-versions/VersionMacro.h",
+    ]
+}
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.cpp b/media/libaudiohal/impl/ConversionHelperHidl.cpp
index 0503698..1d34814 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.cpp
+++ b/media/libaudiohal/impl/ConversionHelperHidl.cpp
@@ -24,10 +24,9 @@
 #include "ConversionHelperHidl.h"
 
 namespace android {
-namespace CPP_VERSION {
 
-using namespace ::android::hardware::audio::common::CPP_VERSION;
-using namespace ::android::hardware::audio::CPP_VERSION;
+using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
+using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
 // static
 status_t ConversionHelperHidl::keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys) {
@@ -129,5 +128,4 @@
     ALOGE("%s %p %s: %s (from rpc)", mClassName, this, funcName, description);
 }
 
-}  // namespace CPP_VERSION
 }  // namespace android
diff --git a/media/libaudiohal/impl/ConversionHelperHidl.h b/media/libaudiohal/impl/ConversionHelperHidl.h
index 2909013..9368551 100644
--- a/media/libaudiohal/impl/ConversionHelperHidl.h
+++ b/media/libaudiohal/impl/ConversionHelperHidl.h
@@ -17,22 +17,21 @@
 #ifndef ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
 #define ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
 
-#include PATH(android/hardware/audio/FILE_VERSION/types.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
 #include <hidl/HidlSupport.h>
 #include <system/audio.h>
 #include <utils/String8.h>
 #include <utils/String16.h>
 #include <utils/Vector.h>
 
-using ::android::hardware::audio::CPP_VERSION::ParameterValue;
-using CoreResult = ::android::hardware::audio::CPP_VERSION::Result;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
+using CoreResult = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
 
 using ::android::hardware::Return;
 using ::android::hardware::hidl_string;
 using ::android::hardware::hidl_vec;
 
 namespace android {
-namespace CPP_VERSION {
 
 class ConversionHelperHidl {
   protected:
@@ -85,7 +84,6 @@
     void emitError(const char* funcName, const char* description);
 };
 
-}  // namespace CPP_VERSION
 }  // namespace android
 
 #endif // ANDROID_HARDWARE_CONVERSION_HELPER_HIDL_H
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index aa94eea..ca2286e 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -30,27 +30,40 @@
 #include <util/CoreUtils.h>
 
 #include "DeviceHalHidl.h"
-#include "EffectHalHidl.h"
 #include "ParameterUtils.h"
 #include "StreamHalHidl.h"
 
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::implementation::HidlUtils;
 using ::android::hardware::audio::common::utils::EnumBitfield;
-using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::implementation::CoreUtils;
 using ::android::hardware::hidl_string;
 using ::android::hardware::hidl_vec;
 
 namespace android {
-namespace CPP_VERSION {
 
-using namespace ::android::hardware::audio::common::CPP_VERSION;
-using namespace ::android::hardware::audio::CPP_VERSION;
+using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
+using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
-using EffectHalHidl = ::android::effect::CPP_VERSION::EffectHalHidl;
+DeviceHalHidl::DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device)
+        : ConversionHelperHidl("Device"), mDevice(device) {
+}
 
-DeviceHalHidl::DeviceHalHidl(const sp<IDevice>& device)
-        : ConversionHelperHidl("Device"), mDevice(device),
-          mPrimaryDevice(IPrimaryDevice::castFrom(device)) {
+DeviceHalHidl::DeviceHalHidl(
+        const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device)
+        : ConversionHelperHidl("Device"),
+#if MAJOR_VERSION <= 6 || (MAJOR_VERSION == 7 && MINOR_VERSION == 0)
+          mDevice(device),
+#endif
+          mPrimaryDevice(device) {
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+    auto getDeviceRet = mPrimaryDevice->getDevice();
+    if (getDeviceRet.isOk()) {
+        mDevice = getDeviceRet;
+    } else {
+        ALOGE("Call to IPrimaryDevice.getDevice has failed: %s",
+                getDeviceRet.description().c_str());
+    }
+#endif
 }
 
 DeviceHalHidl::~DeviceHalHidl() {
@@ -209,12 +222,17 @@
         return status;
     }
     Result retval = Result::NOT_INITIALIZED;
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+    Return<void> ret = mDevice->openOutputStream_7_1(
+#else
     Return<void> ret = mDevice->openOutputStream(
+#endif
             handle, hidlDevice, hidlConfig, hidlFlags,
 #if MAJOR_VERSION >= 4
             {} /* metadata */,
 #endif
-            [&](Result r, const sp<IStreamOut>& result, const AudioConfig& suggestedConfig) {
+            [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& result,
+                    const AudioConfig& suggestedConfig) {
                 retval = r;
                 if (retval == Result::OK) {
                     *outStream = new StreamOutHalHidl(result);
@@ -284,7 +302,9 @@
 #endif
     Return<void> ret = mDevice->openInputStream(
             handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
-            [&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
+            [&](Result r,
+                const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& result,
+                    const AudioConfig& suggestedConfig) {
                 retval = r;
                 if (retval == Result::OK) {
                     *inStream = new StreamInHalHidl(result);
@@ -432,8 +452,7 @@
         audio_port_handle_t device, sp<EffectHalInterface> effect) {
     if (mDevice == 0) return NO_INIT;
     return processReturn("addDeviceEffect", mDevice->addDeviceEffect(
-            static_cast<AudioPortHandle>(device),
-            static_cast<EffectHalHidl*>(effect.get())->effectId()));
+            static_cast<AudioPortHandle>(device), effect->effectId()));
 }
 #else
 status_t DeviceHalHidl::addDeviceEffect(
@@ -447,8 +466,7 @@
         audio_port_handle_t device, sp<EffectHalInterface> effect) {
     if (mDevice == 0) return NO_INIT;
     return processReturn("removeDeviceEffect", mDevice->removeDeviceEffect(
-            static_cast<AudioPortHandle>(device),
-            static_cast<EffectHalHidl*>(effect.get())->effectId()));
+            static_cast<AudioPortHandle>(device), effect->effectId()));
 }
 #else
 status_t DeviceHalHidl::removeDeviceEffect(
@@ -457,6 +475,32 @@
 }
 #endif
 
+status_t DeviceHalHidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    if (mDevice == 0) return NO_INIT;
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+    if (supportsSetConnectedState7_1) {
+        AudioPort hidlPort;
+        if (status_t result = HidlUtils::audioPortFromHal(*port, &hidlPort); result != NO_ERROR) {
+            return result;
+        }
+        Return<Result> ret = mDevice->setConnectedState_7_1(hidlPort, connected);
+        if (!ret.isOk() || ret != Result::NOT_SUPPORTED) {
+            return processReturn("setConnectedState_7_1", ret);
+        } else if (ret == Result::OK) {
+            return NO_ERROR;
+        }
+        supportsSetConnectedState7_1 = false;
+    }
+#endif
+    DeviceAddress hidlAddress;
+    if (status_t result = CoreUtils::deviceAddressFromHal(
+                    port->ext.device.type, port->ext.device.address, &hidlAddress);
+            result != NO_ERROR) {
+        return result;
+    }
+    return processReturn("setConnectedState", mDevice->setConnectedState(hidlAddress, connected));
+}
+
 status_t DeviceHalHidl::dump(int fd, const Vector<String16>& args) {
     if (mDevice == 0) return NO_INIT;
     native_handle_t* hidlHandle = native_handle_create(1, 0);
@@ -480,5 +524,4 @@
     return processReturn("dump", ret);
 }
 
-} // namespace CPP_VERSION
 } // namespace android
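setConnectedState() above uses a probe-and-fall-back pattern: call the 7.1-only setConnectedState_7_1() first, and if the HAL reports NOT_SUPPORTED, clear supportsSetConnectedState7_1 so every later call goes straight to the legacy DeviceAddress path. A hypothetical, framework-free sketch of that pattern; the class and callable names are illustrative, not the real HIDL types:

    #include <functional>
    #include <utility>

    enum class Result { OK, NOT_SUPPORTED, FAILED };

    // Tries the newer entry point once; on NOT_SUPPORTED it caches the answer and
    // routes all subsequent calls to the legacy entry point.
    class ConnectedStateDispatcher {
      public:
        ConnectedStateDispatcher(std::function<Result(bool)> api71,
                                 std::function<Result(bool)> legacyApi)
            : mApi71(std::move(api71)), mLegacyApi(std::move(legacyApi)) {}

        Result setConnectedState(bool connected) {
            if (mSupports71) {
                Result r = mApi71(connected);
                if (r != Result::NOT_SUPPORTED) {
                    return r;  // success or a genuine error from the new entry point
                }
                mSupports71 = false;  // probe once, remember the answer
            }
            return mLegacyApi(connected);
        }

      private:
        std::function<Result(bool)> mApi71;
        std::function<Result(bool)> mLegacyApi;
        bool mSupports71 = true;  // plays the role of supportsSetConnectedState7_1
    };

    int main() {
        ConnectedStateDispatcher d([](bool) { return Result::NOT_SUPPORTED; },
                                   [](bool) { return Result::OK; });
        return d.setConnectedState(true) == Result::OK ? 0 : 1;  // falls back, exits 0
    }

Caching the probe result keeps the extra round trip to a one-time cost on HALs that only implement the legacy call.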
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 2694ab3..fb0be5a 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -20,15 +20,11 @@
 #include PATH(android/hardware/audio/FILE_VERSION/IDevice.h)
 #include PATH(android/hardware/audio/FILE_VERSION/IPrimaryDevice.h)
 #include <media/audiohal/DeviceHalInterface.h>
+#include <media/audiohal/EffectHalInterface.h>
 
 #include "ConversionHelperHidl.h"
 
-using ::android::hardware::audio::CPP_VERSION::IDevice;
-using ::android::hardware::audio::CPP_VERSION::IPrimaryDevice;
-using ::android::hardware::Return;
-
 namespace android {
-namespace CPP_VERSION {
 
 class DeviceHalHidl : public DeviceHalInterface, public ConversionHelperHidl
 {
@@ -119,15 +115,21 @@
     status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
     status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
 
+    status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+
     status_t dump(int fd, const Vector<String16>& args) override;
 
   private:
     friend class DevicesFactoryHalHidl;
-    sp<IDevice> mDevice;
-    sp<IPrimaryDevice> mPrimaryDevice;  // Null if it's not a primary device.
+    sp<::android::hardware::audio::CPP_VERSION::IDevice> mDevice;
+    // Null if it's not a primary device.
+    sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice> mPrimaryDevice;
+    bool supportsSetConnectedState7_1 = true;
 
     // Can not be constructed directly by clients.
-    explicit DeviceHalHidl(const sp<IDevice>& device);
+    explicit DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device);
+    explicit DeviceHalHidl(
+            const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& device);
 
     // The destructor automatically closes the device.
     virtual ~DeviceHalHidl();
@@ -135,7 +137,6 @@
     template <typename HalPort> status_t getAudioPortImpl(HalPort *port);
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_DEVICE_HAL_HIDL_H
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
index e0304af..e473e41 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ b/media/libaudiohal/impl/DeviceHalLocal.cpp
@@ -17,13 +17,13 @@
 #define LOG_TAG "DeviceHalLocal"
 //#define LOG_NDEBUG 0
 
+#include <media/AudioParameter.h>
 #include <utils/Log.h>
 
 #include "DeviceHalLocal.h"
 #include "StreamHalLocal.h"
 
 namespace android {
-namespace CPP_VERSION {
 
 DeviceHalLocal::DeviceHalLocal(audio_hw_device_t *dev)
         : mDev(dev) {
@@ -233,6 +233,14 @@
     return INVALID_OPERATION;
 }
 
+status_t DeviceHalLocal::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+    AudioParameter param(String8(port->ext.device.address));
+    const String8 key(connected ?
+            AudioParameter::keyDeviceConnect : AudioParameter::keyDeviceDisconnect);
+    param.addInt(key, port->ext.device.type);
+    return setParameters(param.toString());
+}
+
 status_t DeviceHalLocal::dump(int fd, const Vector<String16>& /* args */) {
     return mDev->dump(mDev, fd);
 }
@@ -245,5 +253,4 @@
     mDev->close_input_stream(mDev, stream_in);
 }
 
-} // namespace CPP_VERSION
 } // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
index 2fde936..79db930 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ b/media/libaudiohal/impl/DeviceHalLocal.h
@@ -21,7 +21,6 @@
 #include <media/audiohal/DeviceHalInterface.h>
 
 namespace android {
-namespace CPP_VERSION {
 
 class DeviceHalLocal : public DeviceHalInterface
 {
@@ -112,6 +111,8 @@
     status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
     status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
 
+    status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+
     status_t dump(int fd, const Vector<String16>& args) override;
 
     void closeOutputStream(struct audio_stream_out *stream_out);
@@ -131,7 +132,6 @@
     virtual ~DeviceHalLocal();
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index 1c0eacb..f475729 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -31,14 +31,13 @@
 #include "DevicesFactoryHalHidl.h"
 
 using ::android::hardware::audio::CPP_VERSION::IDevice;
-using ::android::hardware::audio::CPP_VERSION::Result;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 using ::android::hidl::manager::V1_0::IServiceManager;
 using ::android::hidl::manager::V1_0::IServiceNotification;
 
 namespace android {
-namespace CPP_VERSION {
 
 class ServiceNotificationListener : public IServiceNotification {
   public:
@@ -115,14 +114,37 @@
     if (status != OK) return status;
     Result retval = Result::NOT_INITIALIZED;
     for (const auto& factory : factories) {
-        Return<void> ret = factory->openDevice(
-                hidlId,
-                [&](Result r, const sp<IDevice>& result) {
-                    retval = r;
-                    if (retval == Result::OK) {
-                        *device = new DeviceHalHidl(result);
-                    }
-                });
+        Return<void> ret;
+        if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_PRIMARY) == 0) {
+            // In V7.1 it's not possible to cast IDevice back to IPrimaryDevice,
+            // thus openPrimaryDevice must be used.
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+            ret = factory->openPrimaryDevice_7_1(
+#else
+            ret = factory->openPrimaryDevice(
+#endif
+                    [&](Result r,
+                        const sp<::android::hardware::audio::CPP_VERSION::IPrimaryDevice>& result) {
+                        retval = r;
+                        if (retval == Result::OK) {
+                            *device = new DeviceHalHidl(result);
+                        }
+                    });
+        } else {
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+            ret = factory->openDevice_7_1(
+#else
+            ret = factory->openDevice(
+#endif
+                    hidlId,
+                    [&](Result r,
+                        const sp<::android::hardware::audio::CPP_VERSION::IDevice>& result) {
+                        retval = r;
+                        if (retval == Result::OK) {
+                            *device = new DeviceHalHidl(result);
+                        }
+                    });
+        }
         if (!ret.isOk()) return FAILED_TRANSACTION;
         switch (retval) {
             // Device was found and was initialized successfully.
@@ -178,7 +200,8 @@
     return NO_ERROR;
 }
 
-void DevicesFactoryHalHidl::addDeviceFactory(sp<IDevicesFactory> factory, bool needToNotify) {
+void DevicesFactoryHalHidl::addDeviceFactory(
+        sp<::android::hardware::audio::CPP_VERSION::IDevicesFactory> factory, bool needToNotify) {
     // It is assumed that the DevicesFactoryHalInterface instance is owned
     // by AudioFlinger and thus have the same lifespan.
     factory->linkToDeath(HalDeathHandler::getInstance(), 0 /*cookie*/);
@@ -198,10 +221,10 @@
     }
 }
 
-std::vector<sp<IDevicesFactory>> DevicesFactoryHalHidl::copyDeviceFactories() {
+std::vector<sp<::android::hardware::audio::CPP_VERSION::IDevicesFactory>>
+        DevicesFactoryHalHidl::copyDeviceFactories() {
     std::lock_guard<std::mutex> lock(mLock);
     return mDeviceFactories;
 }
 
-} // namespace CPP_VERSION
 } // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.h b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
index 6f84efe..fd8dbc4 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
@@ -30,7 +30,6 @@
 using ::android::hardware::audio::CPP_VERSION::IDevicesFactory;
 
 namespace android {
-namespace CPP_VERSION {
 
 class DevicesFactoryHalHidl : public DevicesFactoryHalInterface
 {
@@ -59,7 +58,6 @@
     virtual ~DevicesFactoryHalHidl() = default;
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HIDL_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp b/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
index cde8d85..d684c27 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
@@ -22,7 +22,6 @@
 #include "DevicesFactoryHalLocal.h"
 
 namespace android {
-namespace CPP_VERSION {
 
 DevicesFactoryHalHybrid::DevicesFactoryHalHybrid(sp<IDevicesFactory> hidlFactory)
         : mLocalFactory(new DevicesFactoryHalLocal()),
@@ -51,11 +50,9 @@
     return INVALID_OPERATION;
 }
 
-} // namespace CPP_VERSION
-
 extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
     auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
-    return service ? new CPP_VERSION::DevicesFactoryHalHybrid(service) : nullptr;
+    return service ? new DevicesFactoryHalHybrid(service) : nullptr;
 }
 
 } // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h b/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
index 568a1fb..6b2b845 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
@@ -25,7 +25,6 @@
 using ::android::hardware::audio::CPP_VERSION::IDevicesFactory;
 
 namespace android {
-namespace CPP_VERSION {
 
 class DevicesFactoryHalHybrid : public DevicesFactoryHalInterface
 {
@@ -45,7 +44,6 @@
     sp<DevicesFactoryHalInterface> mHidlFactory;
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp b/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
index af67ff5..13a9acd 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
@@ -26,7 +26,6 @@
 #include "DevicesFactoryHalLocal.h"
 
 namespace android {
-namespace CPP_VERSION {
 
 static status_t load_audio_interface(const char *if_name, audio_hw_device_t **dev)
 {
@@ -67,5 +66,4 @@
     return rc;
 }
 
-} // namespace CPP_VERSION
 } // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.h b/media/libaudiohal/impl/DevicesFactoryHalLocal.h
index 32bf362..eacf109 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalLocal.h
@@ -24,7 +24,6 @@
 #include "DeviceHalLocal.h"
 
 namespace android {
-namespace CPP_VERSION {
 
 class DevicesFactoryHalLocal : public DevicesFactoryHalInterface
 {
@@ -50,7 +49,6 @@
     virtual ~DevicesFactoryHalLocal() {}
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/EffectBufferHalHidl.cpp b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
index 5367972..65297af 100644
--- a/media/libaudiohal/impl/EffectBufferHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectBufferHalHidl.cpp
@@ -31,7 +31,6 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 // static
 uint64_t EffectBufferHalHidl::makeUniqueId() {
@@ -144,5 +143,4 @@
 }
 
 } // namespace effect
-} // namespace CPP_VERSION
 } // namespace android
diff --git a/media/libaudiohal/impl/EffectBufferHalHidl.h b/media/libaudiohal/impl/EffectBufferHalHidl.h
index 4826813..a9df68b 100644
--- a/media/libaudiohal/impl/EffectBufferHalHidl.h
+++ b/media/libaudiohal/impl/EffectBufferHalHidl.h
@@ -28,7 +28,6 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
@@ -74,7 +73,6 @@
     status_t init();
 };
 
-} // namespace CPP_VERSION
 } // namespace effect
 } // namespace android
 
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 51ad146..1bb1e5f 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -36,7 +36,6 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using namespace ::android::hardware::audio::common::CPP_VERSION;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
@@ -310,6 +309,5 @@
     return result;
 }
 
-} // namespace CPP_VERSION
 } // namespace effect
 } // namespace android
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index 8e46638..07745db 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -28,7 +28,6 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
@@ -63,7 +62,7 @@
 
     virtual status_t dump(int fd);
 
-    uint64_t effectId() const { return mEffectId; }
+    virtual uint64_t effectId() const { return mEffectId; }
 
   private:
     friend class EffectsFactoryHalHidl;
@@ -96,7 +95,6 @@
     status_t setProcessBuffers();
 };
 
-} // namespace CPP_VERSION
 } // namespace effect
 } // namespace android
 
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
index ffe0d72..90954b2 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.cpp
@@ -33,7 +33,6 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using namespace ::android::hardware::audio::common::CPP_VERSION;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
@@ -204,12 +203,11 @@
     return EffectBufferHalHidl::mirror(external, size, buffer);
 }
 
-} // namespace CPP_VERSION
 } // namespace effect
 
 extern "C" __attribute__((visibility("default"))) void* createIEffectsFactory() {
     auto service = hardware::audio::effect::CPP_VERSION::IEffectsFactory::getService();
-    return service ? new effect::CPP_VERSION::EffectsFactoryHalHidl(service) : nullptr;
+    return service ? new effect::EffectsFactoryHalHidl(service) : nullptr;
 }
 
 } // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalHidl.h b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
index ff26d9f..7491133 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalHidl.h
@@ -24,10 +24,9 @@
 
 namespace android {
 namespace effect {
-namespace CPP_VERSION {
 
 using ::android::hardware::hidl_vec;
-using ::android::CPP_VERSION::ConversionHelperHidl;
+using ::android::ConversionHelperHidl;
 using namespace ::android::hardware::audio::effect::CPP_VERSION;
 
 class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public ConversionHelperHidl
@@ -70,7 +69,6 @@
     status_t queryAllDescriptors();
 };
 
-} // namespace CPP_VERSION
 } // namespace effect
 } // namespace android
 
diff --git a/media/libaudiohal/impl/ParameterUtils.h b/media/libaudiohal/impl/ParameterUtils.h
index 9cab72e..b5dcb9d 100644
--- a/media/libaudiohal/impl/ParameterUtils.h
+++ b/media/libaudiohal/impl/ParameterUtils.h
@@ -16,17 +16,16 @@
 
 #pragma once
 
-#include PATH(android/hardware/audio/FILE_VERSION/types.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/types.h)
 #include <hidl/HidlSupport.h>
 
-using ::android::hardware::audio::CPP_VERSION::ParameterValue;
-using ::android::hardware::audio::CPP_VERSION::Result;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::ParameterValue;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::Result;
 using ::android::hardware::Return;
 using ::android::hardware::hidl_vec;
 using ::android::hardware::hidl_string;
 
 namespace android {
-namespace CPP_VERSION {
 namespace utils {
 
 #if MAJOR_VERSION == 2
@@ -56,5 +55,4 @@
 #endif
 
 } // namespace utils
-} // namespace CPP_VERSION
 } // namespace android
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index e63aded..6916ca1 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -23,29 +23,26 @@
 #include <mediautils/SchedulingPolicyService.h>
 #include <utils/Log.h>
 
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutCallback.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamOutCallback.h)
 #include <HidlUtils.h>
 #include <util/CoreUtils.h>
 
 #include "DeviceHalHidl.h"
-#include "EffectHalHidl.h"
 #include "ParameterUtils.h"
 #include "StreamHalHidl.h"
 
-using ::android::hardware::audio::common::CPP_VERSION::implementation::HidlUtils;
-using ::android::hardware::audio::CPP_VERSION::implementation::CoreUtils;
+using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::implementation::HidlUtils;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::implementation::CoreUtils;
 using ::android::hardware::MQDescriptorSync;
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 
 namespace android {
-namespace CPP_VERSION {
 
-using EffectHalHidl = ::android::effect::CPP_VERSION::EffectHalHidl;
-using ReadCommand = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadCommand;
+using ReadCommand = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadCommand;
 
-using namespace ::android::hardware::audio::common::CPP_VERSION;
-using namespace ::android::hardware::audio::CPP_VERSION;
+using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
+using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
 
 StreamHalHidl::StreamHalHidl(IStream *stream)
         : ConversionHelperHidl("Stream"),
@@ -137,14 +134,12 @@
 
 status_t StreamHalHidl::addEffect(sp<EffectHalInterface> effect) {
     if (!mStream) return NO_INIT;
-    return processReturn("addEffect", mStream->addEffect(
-                    static_cast<EffectHalHidl*>(effect.get())->effectId()));
+    return processReturn("addEffect", mStream->addEffect(effect->effectId()));
 }
 
 status_t StreamHalHidl::removeEffect(sp<EffectHalInterface> effect) {
     if (!mStream) return NO_INIT;
-    return processReturn("removeEffect", mStream->removeEffect(
-                    static_cast<EffectHalHidl*>(effect.get())->effectId()));
+    return processReturn("removeEffect", mStream->removeEffect(effect->effectId()));
 }
 
 status_t StreamHalHidl::standby() {
@@ -328,7 +323,8 @@
 
 }  // namespace
 
-StreamOutHalHidl::StreamOutHalHidl(const sp<IStreamOut>& stream)
+StreamOutHalHidl::StreamOutHalHidl(
+        const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream)
         : StreamHalHidl(stream.get()), mStream(stream), mWriterClient(0), mEfGroup(nullptr) {
 }
 
@@ -644,7 +640,11 @@
 #elif MAJOR_VERSION >= 4
 status_t StreamOutHalHidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
-    CPP_VERSION::SourceMetadata hidlMetadata;
+#if MAJOR_VERSION == 4
+    ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#else
+    ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#endif
     if (status_t status = CoreUtils::sourceMetadataFromHalV7(
                     sourceMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
             status != OK) {
@@ -686,6 +686,7 @@
     // Codec format callback is supported starting from audio HAL V6.0
     return INVALID_OPERATION;
 }
+
 #else
 
 status_t StreamOutHalHidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
@@ -755,7 +756,7 @@
                     static_cast<TimestretchFallbackMode>(playbackRate.mFallbackMode)}));
 }
 
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutEventCallback.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamOutEventCallback.h)
 
 namespace {
 
@@ -791,6 +792,84 @@
 }
 #endif
 
+#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
+using hardware::audio::V7_1::LatencyMode;
+
+status_t StreamOutHalHidl::setLatencyMode(audio_latency_mode_t mode) {
+    if (mStream == 0) return NO_INIT;
+    return processReturn(
+            "setLatencyMode", mStream->setLatencyMode(static_cast<LatencyMode>(mode)));
+};
+
+status_t StreamOutHalHidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
+    if (!mStream) return NO_INIT;
+    Result retval;
+    Return<void> ret = mStream->getRecommendedLatencyModes(
+            [&](Result r, hidl_vec<LatencyMode> hidlModes) {
+        retval = r;
+        for (size_t i = 0; i < hidlModes.size(); i++) {
+            modes->push_back(static_cast<audio_latency_mode_t>(hidlModes[i]));
+        }
+    });
+    return processReturn("getRecommendedLatencyModes", ret, retval);
+};
+
+#include PATH(android/hardware/audio/FILE_VERSION/IStreamOutLatencyModeCallback.h)
+
+using hardware::audio::V7_1::IStreamOutLatencyModeCallback;
+
+namespace {
+struct StreamOutLatencyModeCallback : public IStreamOutLatencyModeCallback {
+    StreamOutLatencyModeCallback(const wp<StreamOutHalHidl>& stream) : mStream(stream) {}
+
+    // IStreamOutLatencyModeCallback implementation
+    Return<void> onRecommendedLatencyModeChanged(const hidl_vec<LatencyMode>& hidlModes) override {
+        sp<StreamOutHalHidl> stream = mStream.promote();
+        if (stream != nullptr) {
+            std::vector<audio_latency_mode_t> modes;
+            for (size_t i = 0; i < hidlModes.size(); i++) {
+                modes.push_back(static_cast<audio_latency_mode_t>(hidlModes[i]));
+            }
+            stream->onRecommendedLatencyModeChanged(modes);
+        }
+        return Void();
+    }
+
+  private:
+    wp<StreamOutHalHidl> mStream;
+};
+}  // namespace
+
+status_t StreamOutHalHidl::setLatencyModeCallback(
+        const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) {
+
+    if (mStream == nullptr) return NO_INIT;
+    mLatencyModeCallback = callback;
+    status_t status = processReturn(
+            "setLatencyModeCallback",
+            mStream->setLatencyModeCallback(
+                    callback.get() == nullptr ? nullptr : new StreamOutLatencyModeCallback(this)));
+    return status;
+};
+
+#else
+
+status_t StreamOutHalHidl::setLatencyMode(audio_latency_mode_t mode __unused) {
+    return INVALID_OPERATION;
+};
+
+status_t StreamOutHalHidl::getRecommendedLatencyModes(
+        std::vector<audio_latency_mode_t> *modes __unused) {
+    return INVALID_OPERATION;
+};
+
+status_t StreamOutHalHidl::setLatencyModeCallback(
+        const sp<StreamOutHalInterfaceLatencyModeCallback>& callback __unused) {
+    return INVALID_OPERATION;
+};
+
+#endif
+
 void StreamOutHalHidl::onWriteReady() {
     sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
@@ -819,8 +898,16 @@
     callback->onCodecFormatChanged(metadataBs);
 }
 
+void StreamOutHalHidl::onRecommendedLatencyModeChanged(
+        const std::vector<audio_latency_mode_t>& modes) {
+    sp<StreamOutHalInterfaceLatencyModeCallback> callback = mLatencyModeCallback.load().promote();
+    if (callback == nullptr) return;
+    callback->onRecommendedLatencyModeChanged(modes);
+}
 
-StreamInHalHidl::StreamInHalHidl(const sp<IStreamIn>& stream)
+
+StreamInHalHidl::StreamInHalHidl(
+        const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream)
         : StreamHalHidl(stream.get()), mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
 }
 
@@ -1033,7 +1120,11 @@
 
 status_t StreamInHalHidl::updateSinkMetadata(const
         StreamInHalInterface::SinkMetadata& sinkMetadata) {
-    CPP_VERSION::SinkMetadata hidlMetadata;
+#if MAJOR_VERSION == 4
+    ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SinkMetadata hidlMetadata;
+#else
+    ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SinkMetadata hidlMetadata;
+#endif
     if (status_t status = CoreUtils::sinkMetadataFromHalV7(
                     sinkMetadata.tracks, true /*ignoreNonVendorTags*/, &hidlMetadata);
             status != OK) {
@@ -1068,5 +1159,4 @@
 }
 #endif
 
-} // namespace CPP_VERSION
 } // namespace android
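The latency-mode plumbing added above bridges a HAL-side callback into the framework wrapper: the callback object holds only a weak reference to StreamOutHalHidl, promotes it on each notification, and converts the HAL enum values into audio_latency_mode_t before forwarding. A simplified, framework-free sketch of that bridging, with std::weak_ptr standing in for android::wp and placeholder enum values:

    #include <cstdint>
    #include <cstdio>
    #include <memory>
    #include <vector>

    enum class HalLatencyMode : int32_t { FREE = 0, LOW = 1 };          // placeholder values
    enum audio_latency_mode_t : int32_t { AUDIO_LATENCY_MODE_FREE = 0,
                                          AUDIO_LATENCY_MODE_LOW = 1 };  // placeholder values

    struct StreamOut {
        void onRecommendedLatencyModeChanged(const std::vector<audio_latency_mode_t>& modes) {
            std::printf("got %zu recommended latency modes\n", modes.size());
        }
    };

    struct LatencyModeCallback {
        explicit LatencyModeCallback(std::weak_ptr<StreamOut> stream) : mStream(std::move(stream)) {}

        void onRecommendedLatencyModeChanged(const std::vector<HalLatencyMode>& halModes) {
            std::shared_ptr<StreamOut> stream = mStream.lock();  // the stream may be gone already
            if (stream == nullptr) return;
            std::vector<audio_latency_mode_t> modes;
            modes.reserve(halModes.size());
            for (HalLatencyMode m : halModes) {
                modes.push_back(static_cast<audio_latency_mode_t>(m));
            }
            stream->onRecommendedLatencyModeChanged(modes);
        }

        std::weak_ptr<StreamOut> mStream;  // weak on purpose: the callback must not keep the stream alive
    };

    int main() {
        auto stream = std::make_shared<StreamOut>();
        LatencyModeCallback cb(stream);
        cb.onRecommendedLatencyModeChanged({HalLatencyMode::LOW});
        return 0;
    }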
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 6f5dd04..44bf60a 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -19,30 +19,29 @@
 
 #include <atomic>
 
-#include PATH(android/hardware/audio/FILE_VERSION/IStream.h)
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamIn.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
 #include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
 #include <fmq/EventFlag.h>
 #include <fmq/MessageQueue.h>
+#include <media/audiohal/EffectHalInterface.h>
 #include <media/audiohal/StreamHalInterface.h>
 #include <mediautils/Synchronization.h>
 
 #include "ConversionHelperHidl.h"
 #include "StreamPowerLog.h"
 
-using ::android::hardware::audio::CPP_VERSION::IStream;
-using ::android::hardware::audio::CPP_VERSION::IStreamIn;
-using ::android::hardware::audio::CPP_VERSION::IStreamOut;
+using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStream;
 using ::android::hardware::EventFlag;
 using ::android::hardware::MessageQueue;
 using ::android::hardware::Return;
-using ReadParameters = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadParameters;
-using ReadStatus = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadStatus;
+using ReadParameters =
+        ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadParameters;
+using ReadStatus = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadStatus;
 using WriteCommand = ::android::hardware::audio::CPP_VERSION::IStreamOut::WriteCommand;
 using WriteStatus = ::android::hardware::audio::CPP_VERSION::IStreamOut::WriteStatus;
 
 namespace android {
-namespace CPP_VERSION {
 
 class DeviceHalHidl;
 
@@ -191,6 +190,13 @@
     // Methods used by StreamCodecFormatCallback (HIDL).
     void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs);
 
+    status_t setLatencyMode(audio_latency_mode_t mode) override;
+    status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) override;
+    status_t setLatencyModeCallback(
+            const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) override;
+
+    void onRecommendedLatencyModeChanged(const std::vector<audio_latency_mode_t>& modes);
+
   private:
     friend class DeviceHalHidl;
     typedef MessageQueue<WriteCommand, hardware::kSynchronizedReadWrite> CommandMQ;
@@ -199,7 +205,9 @@
 
     mediautils::atomic_wp<StreamOutHalInterfaceCallback> mCallback;
     mediautils::atomic_wp<StreamOutHalInterfaceEventCallback> mEventCallback;
-    const sp<IStreamOut> mStream;
+    mediautils::atomic_wp<StreamOutHalInterfaceLatencyModeCallback> mLatencyModeCallback;
+
+    const sp<::android::hardware::audio::CPP_VERSION::IStreamOut> mStream;
     std::unique_ptr<CommandMQ> mCommandMQ;
     std::unique_ptr<DataMQ> mDataMQ;
     std::unique_ptr<StatusMQ> mStatusMQ;
@@ -207,7 +215,7 @@
     EventFlag* mEfGroup;
 
     // Can not be constructed directly by clients.
-    StreamOutHalHidl(const sp<IStreamOut>& stream);
+    StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
 
     virtual ~StreamOutHalHidl();
 
@@ -255,7 +263,7 @@
     typedef MessageQueue<uint8_t, hardware::kSynchronizedReadWrite> DataMQ;
     typedef MessageQueue<ReadStatus, hardware::kSynchronizedReadWrite> StatusMQ;
 
-    const sp<IStreamIn> mStream;
+    const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn> mStream;
     std::unique_ptr<CommandMQ> mCommandMQ;
     std::unique_ptr<DataMQ> mDataMQ;
     std::unique_ptr<StatusMQ> mStatusMQ;
@@ -263,7 +271,8 @@
     EventFlag* mEfGroup;
 
     // Can not be constructed directly by clients.
-    StreamInHalHidl(const sp<IStreamIn>& stream);
+    StreamInHalHidl(
+            const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream);
 
     virtual ~StreamInHalHidl();
 
@@ -273,7 +282,6 @@
     status_t prepareForReading(size_t bufferSize);
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_STREAM_HAL_HIDL_H
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
index 11fac61..477f510 100644
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ b/media/libaudiohal/impl/StreamHalLocal.cpp
@@ -27,7 +27,6 @@
 #include "StreamHalLocal.h"
 
 namespace android {
-namespace CPP_VERSION {
 
 StreamHalLocal::StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device)
         : mDevice(device),
@@ -518,7 +517,4 @@
 }
 #endif
 
-} // namespace CPP_VERSION
 } // namespace android
-
-
diff --git a/media/libaudiohal/impl/StreamHalLocal.h b/media/libaudiohal/impl/StreamHalLocal.h
index 493c521..770137f 100644
--- a/media/libaudiohal/impl/StreamHalLocal.h
+++ b/media/libaudiohal/impl/StreamHalLocal.h
@@ -21,7 +21,6 @@
 #include "StreamPowerLog.h"
 
 namespace android {
-namespace CPP_VERSION {
 
 class DeviceHalLocal;
 
@@ -169,6 +168,18 @@
 
     status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
 
+    status_t setLatencyMode(audio_latency_mode_t mode __unused) override {
+        return INVALID_OPERATION;
+    }
+    status_t getRecommendedLatencyModes(
+            std::vector<audio_latency_mode_t> *modes __unused) override {
+        return INVALID_OPERATION;
+    }
+    status_t setLatencyModeCallback(
+            const sp<StreamOutHalInterfaceLatencyModeCallback>& callback __unused) override {
+        return INVALID_OPERATION;
+    }
+
   private:
     audio_stream_out_t *mStream;
     wp<StreamOutHalInterfaceCallback> mCallback;
@@ -246,7 +257,6 @@
     void doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata);
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/StreamPowerLog.h b/media/libaudiohal/impl/StreamPowerLog.h
index f6a554b..c08ee47 100644
--- a/media/libaudiohal/impl/StreamPowerLog.h
+++ b/media/libaudiohal/impl/StreamPowerLog.h
@@ -24,7 +24,6 @@
 #include <system/audio.h>
 
 namespace android {
-namespace CPP_VERSION {
 
 class StreamPowerLog {
 public:
@@ -99,7 +98,6 @@
     size_t mFrameSize;
 };
 
-} // namespace CPP_VERSION
 } // namespace android
 
 #endif // ANDROID_HARDWARE_STREAM_POWER_LOG_H
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 69cbcec..f0a0b29 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -120,6 +120,9 @@
     virtual status_t removeDeviceEffect(
             audio_port_handle_t device, sp<EffectHalInterface> effect) = 0;
 
+    // Update the connection status of an external device.
+    virtual status_t setConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+
     virtual status_t dump(int fd, const Vector<String16>& args) = 0;
 
   protected:
diff --git a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
index 03165bd..2969c92 100644
--- a/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/EffectHalInterface.h
@@ -57,6 +57,9 @@
 
     virtual status_t dump(int fd) = 0;
 
+    // Unique effect ID to use with the core HAL.
+    virtual uint64_t effectId() const = 0;
+
   protected:
     // Subclasses can not be constructed directly by clients.
     EffectHalInterface() {}
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 2b5b2db..e12fe77 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -117,6 +117,18 @@
     virtual ~StreamOutHalInterfaceEventCallback() {}
 };
 
+class StreamOutHalInterfaceLatencyModeCallback : public virtual RefBase {
+public:
+    /**
+     * Called with the new list of supported latency modes when a change occurs.
+     */
+    virtual void onRecommendedLatencyModeChanged(std::vector<audio_latency_mode_t> modes) = 0;
+
+protected:
+    StreamOutHalInterfaceLatencyModeCallback() {}
+    virtual ~StreamOutHalInterfaceLatencyModeCallback() {}
+};
+
 class StreamOutHalInterface : public virtual StreamHalInterface {
   public:
     // Return the audio hardware driver estimated latency in milliseconds.
@@ -194,6 +206,42 @@
 
     virtual status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) = 0;
 
+    /**
+     * Indicates the requested latency mode for this output stream.
+     *
+     * The requested mode can be one of the modes returned by
+     * getRecommendedLatencyModes() API.
+     *
+     * @param mode the requested latency mode.
+     * @return operation completion status.
+     */
+    virtual status_t setLatencyMode(audio_latency_mode_t mode) = 0;
+
+    /**
+     * Indicates which latency modes are currently supported on this output stream.
+     * If the transport protocol (e.g. Bluetooth A2DP) used by this output stream to reach
+     * the output device supports variable latency modes, the HAL indicates which
+     * modes are currently supported.
+     * The framework can then call setLatencyMode() with one of the supported modes to select
+     * the desired operation mode.
+     *
+     * @param modes currently supported latency modes.
+     * @return operation completion status.
+     */
+    virtual status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) = 0;
+
+    /**
+     * Set the callback interface for notifying changes in supported latency modes.
+     *
+     * Calling this method with a null pointer will result in releasing
+     * the callback.
+     *
+     * @param callback the registered callback or null to unregister.
+     * @return operation completion status.
+     */
+    virtual status_t setLatencyModeCallback(
+            const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) = 0;
+
   protected:
     virtual ~StreamOutHalInterface() {}
 };
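The three new pure-virtual methods documented above are meant to be used together: register a callback for recommendation changes, ask which modes are currently recommended, then request one of them with setLatencyMode(). A minimal sketch of that call sequence using stand-in types (plain virtuals, std::shared_ptr, and a fake stream instead of the real interface and android::sp):

    #include <memory>
    #include <vector>

    enum audio_latency_mode_t : int { AUDIO_LATENCY_MODE_FREE = 0, AUDIO_LATENCY_MODE_LOW = 1 };
    using status_t = int;
    constexpr status_t STATUS_OK = 0;

    struct LatencyModeCallback {
        virtual ~LatencyModeCallback() = default;
        virtual void onRecommendedLatencyModeChanged(std::vector<audio_latency_mode_t> modes) = 0;
    };

    struct OutStream {  // stand-in for the HAL output stream interface
        virtual ~OutStream() = default;
        virtual status_t setLatencyMode(audio_latency_mode_t mode) = 0;
        virtual status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t>* modes) = 0;
        virtual status_t setLatencyModeCallback(const std::shared_ptr<LatencyModeCallback>& cb) = 0;
    };

    // Typical client flow: register for updates, query, then pick a recommended mode.
    status_t enableLowLatencyIfRecommended(OutStream& stream,
                                           const std::shared_ptr<LatencyModeCallback>& cb) {
        if (status_t s = stream.setLatencyModeCallback(cb); s != STATUS_OK) return s;
        std::vector<audio_latency_mode_t> modes;
        if (status_t s = stream.getRecommendedLatencyModes(&modes); s != STATUS_OK) return s;
        for (audio_latency_mode_t m : modes) {
            if (m == AUDIO_LATENCY_MODE_LOW) {
                return stream.setLatencyMode(m);  // only request a mode that was recommended
            }
        }
        return STATUS_OK;  // low latency not recommended right now; keep the default mode
    }

    struct FakeStream : OutStream {  // trivial mock so the sketch runs end to end
        status_t setLatencyMode(audio_latency_mode_t) override { return STATUS_OK; }
        status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t>* modes) override {
            modes->push_back(AUDIO_LATENCY_MODE_LOW);
            return STATUS_OK;
        }
        status_t setLatencyModeCallback(const std::shared_ptr<LatencyModeCallback>&) override {
            return STATUS_OK;
        }
    };

    int main() {
        FakeStream stream;
        return enableLowLatencyIfRecommended(stream, nullptr);
    }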
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
index 1eadd27..ccef5ab 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Control.cpp
@@ -135,7 +135,6 @@
     LVM_UINT32 fs =
             (LVM_UINT32)LVEQNB_SampleRateTab[(LVM_UINT16)pParams->SampleRate]; /* Sample rate */
     LVM_UINT32 fc;     /* Filter centre frequency */
-    LVM_INT16 QFactor; /* Filter Q factor */
 
     pInstance->NBands = pParams->NBands;
 
@@ -144,7 +143,6 @@
          * Get the filter settings
          */
         fc = (LVM_UINT32)pParams->pBandDefinition[i].Frequency; /* Get the band centre frequency */
-        QFactor = (LVM_INT16)pParams->pBandDefinition[i].QFactor; /* Get the band Q factor */
 
         pInstance->pBiquadType[i] = LVEQNB_SinglePrecision_Float; /* Default to single precision */
 
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
index 8e63502..ffed6d4 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Control.cpp
@@ -421,7 +421,6 @@
      * Intermediate variables and temporary values
      */
     LVM_FLOAT T0;
-    LVM_FLOAT D;
     LVM_FLOAT A0;
     LVM_FLOAT B1;
     LVM_FLOAT B2;
@@ -444,9 +443,6 @@
      * Calculating the intermediate values
      */
     T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
-    D = 3200;                                        /* Floating point value 1.000000 (1*100*2^5) */
-    /* Force D = 1 : the function was originally used for a peaking filter.
-       The D parameter do not exist for a BandPass filter coefficients */
 
     /*
      * Calculate the B2 coefficient
@@ -535,7 +531,6 @@
      * Intermediate variables and temporary values
      */
     LVM_FLOAT T0;
-    LVM_FLOAT D;
     LVM_FLOAT A0;
     LVM_FLOAT B1;
     LVM_FLOAT B2;
@@ -558,9 +553,6 @@
      * Calculating the intermediate values
      */
     T0 = Frequency * LVPSA_Float_TwoPiOnFsTable[Fs]; /* T0 = 2 * Pi * Fc / Fs */
-    D = 3200;                                        /* Floating point value 1.000000 (1*100*2^5) */
-    /* Force D = 1 : the function was originally used for a peaking filter.
-       The D parameter do not exist for a BandPass filter coefficients */
 
     /*
      * Calculate the B2 coefficient
diff --git a/media/libeffects/preprocessing/.clang-format b/media/libeffects/preprocessing/.clang-format
deleted file mode 120000
index f1b4f69..0000000
--- a/media/libeffects/preprocessing/.clang-format
+++ /dev/null
@@ -1 +0,0 @@
-../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/libeffects/preprocessing/tests/correlation.cpp b/media/libeffects/preprocessing/tests/correlation.cpp
index eb56fc3..0853673 100644
--- a/media/libeffects/preprocessing/tests/correlation.cpp
+++ b/media/libeffects/preprocessing/tests/correlation.cpp
@@ -36,7 +36,7 @@
                                                                    const int16_t* sigY, int len,
                                                                    int16_t enableCrossCorr) {
     float maxCorrVal = 0.f, prevCorrVal = 0.f;
-    int delay = 0, peakIndex = 0, flag = 0;
+    int peakIndex = 0, flag = 0;
     int loopLim = (1 == enableCrossCorr) ? len : kMinLoopLimitValue;
     std::vector<int> peakIndexVect(kNumPeaks, 0);
     std::vector<float> peakValueVect(kNumPeaks, 0.f);
@@ -47,7 +47,6 @@
         }
         corrVal /= len - i;
         if (corrVal > maxCorrVal) {
-            delay = i;
             maxCorrVal = corrVal;
         }
         // Correlation peaks are expected to be observed at equal intervals. The interval length is
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 273d91c..50f1bf2 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -15,6 +15,7 @@
  */
 
 //#define LOG_NDEBUG 0
+#include "include/HeifDecoderAPI.h"
 #define LOG_TAG "HeifDecoderImpl"
 
 #include "HeifDecoderImpl.h"
@@ -25,6 +26,7 @@
 #include <binder/IMemory.h>
 #include <binder/MemoryDealer.h>
 #include <drm/drm_framework_common.h>
+#include <log/log.h>
 #include <media/mediametadataretriever.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -421,7 +423,13 @@
 
         initFrameInfo(&mSequenceInfo, videoFrame);
 
-        mSequenceLength = atoi(mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT));
+        const char* frameCount = mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT);
+        if (frameCount == nullptr) {
+            android_errorWriteWithInfoLog(0x534e4554, "215002587", -1, NULL, 0);
+            ALOGD("No valid sequence information in metadata");
+            return false;
+        }
+        mSequenceLength = atoi(frameCount);
 
         if (defaultInfo == nullptr) {
             defaultInfo = &mSequenceInfo;
@@ -464,7 +472,7 @@
 }
 
 bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
-    if (heifColor == mOutputColor) {
+    if (heifColor == (HeifColorFormat)mOutputColor) {
         return true;
     }
 
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index e98d7d8..9c1b563 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -378,12 +378,12 @@
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
         "framework-permission-aidl-cpp",
     ],
 
     export_static_lib_headers: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
         "framework-permission-aidl-cpp",
     ],
 
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 1c9b9e4..5215c1b 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -949,6 +949,9 @@
         mVideoWidth = ext1;
         mVideoHeight = ext2;
         break;
+    case MEDIA_STARTED:
+        ALOGV("Received media started message");
+        break;
     case MEDIA_NOTIFY_TIME:
         ALOGV("Received notify time message");
         break;
diff --git a/media/libmedia/xsd/vts/OWNERS b/media/libmedia/xsd/vts/OWNERS
new file mode 100644
index 0000000..9af2eba
--- /dev/null
+++ b/media/libmedia/xsd/vts/OWNERS
@@ -0,0 +1,2 @@
+# Bug component: 151862
+sundongahn@google.com
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 9b54199..165a8ad 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -20,7 +20,7 @@
     },
     apex_available: [
         "//apex_available:platform",
-        "com.android.bluetooth.updatable",
+        "com.android.bluetooth",
         "com.android.media",
         "com.android.media.swcodec",
     ],
@@ -29,6 +29,7 @@
 cc_library {
     name: "libmedia_helper",
     vendor_available: true,
+    min_sdk_version: "29",
     vndk: {
         enabled: true,
     },
@@ -43,7 +44,10 @@
         "-Wextra",
         "-Wall",
     ],
-    shared_libs: ["libutils", "liblog"],
+    shared_libs: [
+        "libutils",
+        "liblog",
+    ],
     header_libs: [
         "libmedia_helper_headers",
         "libaudio_system_headers",
@@ -51,11 +55,16 @@
     export_header_lib_headers: [
         "libmedia_helper_headers",
     ],
-    clang: true,
+
     host_supported: true,
     target: {
         darwin: {
             enabled: false,
         },
     },
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "test_com.android.media",
+    ],
 }
diff --git a/media/libmediahelper/AudioValidator.cpp b/media/libmediahelper/AudioValidator.cpp
index 7eddbe1..5a0d517 100644
--- a/media/libmediahelper/AudioValidator.cpp
+++ b/media/libmediahelper/AudioValidator.cpp
@@ -47,8 +47,7 @@
         const effect_descriptor_t& desc, std::string_view bugNumber)
 {
     status_t status = NO_ERROR;
-    if (checkStringOverflow(desc.name)
-        | /* always */ checkStringOverflow(desc.implementor)) {
+    if (checkStringOverflow(desc.name) || checkStringOverflow(desc.implementor)) {
         status = BAD_VALUE;
     }
     return safetyNetLog(status, bugNumber);
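The AudioValidator change above replaces the deliberately non-short-circuiting `|` (note the removed `/* always */` comment) with `||`. The value of the condition is identical either way; what changes is whether the second checkStringOverflow() call is made once the first has already returned true. A self-contained illustration of that difference in evaluation order:

    #include <cstdio>

    static bool check(const char* label) {
        std::printf("evaluated %s\n", label);
        return true;  // pretend the string overflowed
    }

    int main() {
        std::printf("bitwise |: both operands always evaluated\n");
        if (check("name") | check("implementor")) {}

        std::printf("logical ||: stops at the first true operand\n");
        if (check("name") || check("implementor")) {}  // "implementor" is never evaluated

        return 0;
    }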
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index d758391..4a3973e6 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -65,6 +65,7 @@
         "//frameworks/base/apex/media/framework",
         "//frameworks/base/core/jni",
         "//frameworks/base/media/jni",
+        "//packages/modules/Media/apex/framework",
     ],
 }
 
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index f55678d..c416a51 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -65,6 +65,7 @@
         "libstagefright_foundation",
         "libstagefright_httplive",
         "libutils",
+        "packagemanager_aidl-cpp",
     ],
 
     header_libs: [
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 2aabd53..8c86e16 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -225,10 +225,26 @@
             "media.stagefright.thumbnail.prefer_hw_codecs", false);
     uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
     Vector<AString> matchingCodecs;
+    sp<AMessage> format = new AMessage;
+    status_t err = convertMetaDataToMessage(trackMeta, &format);
+    if (err != OK) {
+        format = NULL;
+    }
+
+    // If decoding thumbnail check decoder supports thumbnail dimensions instead
+    int32_t thumbHeight, thumbWidth;
+    if (thumbnail && format != NULL
+            && trackMeta->findInt32(kKeyThumbnailHeight, &thumbHeight)
+            && trackMeta->findInt32(kKeyThumbnailWidth, &thumbWidth)) {
+        format->setInt32("height", thumbHeight);
+        format->setInt32("width", thumbWidth);
+    }
+
     MediaCodecList::findMatchingCodecs(
             mime,
             false, /* encoder */
             flags,
+            format,
             &matchingCodecs);
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
@@ -348,11 +364,18 @@
     bool preferhw = property_get_bool(
             "media.stagefright.thumbnail.prefer_hw_codecs", false);
     uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
+    sp<AMessage> format = new AMessage;
+    status_t err = convertMetaDataToMessage(trackMeta, &format);
+    if (err != OK) {
+        format = NULL;
+    }
+
     Vector<AString> matchingCodecs;
     MediaCodecList::findMatchingCodecs(
             mime,
             false, /* encoder */
             flags,
+            format,
             &matchingCodecs);
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index bffd7b3..6347b7a 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -16,6 +16,8 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "StagefrightRecorder"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Trace.h>
 #include <inttypes.h>
 // TODO/workaround: including base logging now as it conflicts with ADebug.h
 // and it must be included first.
@@ -1856,6 +1858,7 @@
 // Set up the appropriate MediaSource depending on the chosen option
 status_t StagefrightRecorder::setupMediaSource(
                       sp<MediaSource> *mediaSource) {
+    ATRACE_CALL();
     if (mVideoSource == VIDEO_SOURCE_DEFAULT
             || mVideoSource == VIDEO_SOURCE_CAMERA) {
         sp<CameraSource> cameraSource;
@@ -1936,6 +1939,7 @@
 status_t StagefrightRecorder::setupVideoEncoder(
         const sp<MediaSource> &cameraSource,
         sp<MediaCodecSource> *source) {
+    ATRACE_CALL();
     source->clear();
 
     sp<AMessage> format = new AMessage();
@@ -2114,6 +2118,7 @@
 }
 
 status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
+    ATRACE_CALL();
     status_t status = BAD_VALUE;
     if (OK != (status = checkAudioEncoderCapabilities())) {
         return status;
diff --git a/media/libmediaplayerservice/tests/Android.bp b/media/libmediaplayerservice/tests/Android.bp
index 98626fd..99202b8 100644
--- a/media/libmediaplayerservice/tests/Android.bp
+++ b/media/libmediaplayerservice/tests/Android.bp
@@ -30,7 +30,7 @@
     ],
 
     static_libs: [
-        "resourcemanager_aidl_interface-ndk_platform",
+        "resourcemanager_aidl_interface-ndk",
     ],
 
     include_dirs: [
diff --git a/media/libmediatranscoding/Android.bp b/media/libmediatranscoding/Android.bp
index 042850c..937650f 100644
--- a/media/libmediatranscoding/Android.bp
+++ b/media/libmediatranscoding/Android.bp
@@ -106,8 +106,8 @@
     export_include_dirs: ["include"],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
-        "resourceobserver_aidl_interface-V1-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
+        "resourceobserver_aidl_interface-V1-ndk",
         "libstatslog_media",
     ],
 
diff --git a/media/libmediatranscoding/include/media/ControllerClientInterface.h b/media/libmediatranscoding/include/media/ControllerClientInterface.h
index 9311e2e..ea63da8 100644
--- a/media/libmediatranscoding/include/media/ControllerClientInterface.h
+++ b/media/libmediatranscoding/include/media/ControllerClientInterface.h
@@ -66,7 +66,7 @@
      * Returns false if the session doesn't exist, or the client is already requesting the
      * session. Returns true otherwise.
      */
-    virtual bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid);
+    virtual bool addClientUid(ClientIdType clientId, SessionIdType sessionId, uid_t clientUid) = 0;
 
     /**
      * Retrieves the (unsorted) list of all clients requesting the session identified by
@@ -81,7 +81,7 @@
      * Returns false if the session doesn't exist. Returns true otherwise.
      */
     virtual bool getClientUids(ClientIdType clientId, SessionIdType sessionId,
-                               std::vector<int32_t>* out_clientUids);
+                               std::vector<int32_t>* out_clientUids) = 0;
 
 protected:
     virtual ~ControllerClientInterface() = default;
diff --git a/media/libmediatranscoding/tests/Android.bp b/media/libmediatranscoding/tests/Android.bp
index 603611a..7a6980f 100644
--- a/media/libmediatranscoding/tests/Android.bp
+++ b/media/libmediatranscoding/tests/Android.bp
@@ -31,7 +31,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
         "libmediatranscoding",
     ],
 
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 1e64538..a4fbbbc 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -3304,10 +3304,12 @@
     if (err != OK) {
         ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                 sidebandHandle, err);
-        return err;
     }
 
-    return OK;
+    native_handle_close(sidebandHandle);
+    native_handle_delete(sidebandHandle);
+
+    return err;
 }
 
 status_t ACodec::setVideoPortFormatType(
@@ -5431,6 +5433,7 @@
                     notify->setInt32("channel-count", params.nChannels);
                     notify->setInt32("sample-rate", params.nSampleRate);
                     notify->setInt32("bitrate", params.nBitRate);
+                    notify->setInt32("aac-profile", params.eAACProfile);
                     break;
                 }
 
@@ -9205,4 +9208,19 @@
     return OK;
 }
 
+status_t ACodec::querySupportedParameters(std::vector<std::string> *names) {
+    if (!names) {
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+status_t ACodec::subscribeToParameters([[maybe_unused]] const std::vector<std::string> &names) {
+    return OK;
+}
+
+status_t ACodec::unsubscribeFromParameters([[maybe_unused]] const std::vector<std::string> &names) {
+    return OK;
+}
+
 }  // namespace android
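One behavioral point in the ACodec hunk above: the sideband handle is now closed and deleted regardless of whether native_window_set_sideband_stream() succeeded, and the original status is still returned. A hedged sketch of that cleanup-then-propagate pattern; the handle type and calls below are stand-ins, not the real libcutils/ANativeWindow API:

#include <cstdio>

// Hypothetical stand-ins for the native handle and window calls.
struct FakeHandle { int fd; };
static int fakeSetSidebandStream(FakeHandle* /*h*/) { return -22; /* pretend failure */ }
static void fakeClose(FakeHandle* h)  { std::printf("close fd %d\n", h->fd); }
static void fakeDelete(FakeHandle* h) { delete h; }

int setSidebandStream(FakeHandle* handle) {
    int err = fakeSetSidebandStream(handle);
    if (err != 0) {
        std::printf("set_sideband_stream failed (err %d)\n", err);
        // Do NOT return early here: the handle would leak.
    }
    // Always release the handle, then report the original result.
    fakeClose(handle);
    fakeDelete(handle);
    return err;
}

int main() { return setSidebandStream(new FakeHandle{3}) == 0 ? 0 : 1; }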
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index a052a70..10baec4 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -109,6 +109,7 @@
 
     srcs: [
         "CodecBase.cpp",
+        "DataConverter.cpp",
         "FrameRenderTracker.cpp",
         "MediaCodecListWriter.cpp",
         "SkipCutBuffer.cpp",
@@ -125,6 +126,7 @@
     ],
 
     shared_libs: [
+        "libaudioutils",
         "libgui",
         "libhidlallocatorutils",
         "liblog",
@@ -251,6 +253,40 @@
         ],
     },
 }
+
+cc_library_shared {
+    name: "libstagefright_surface_utils",
+
+    srcs: [
+        "SurfaceUtils.cpp",
+    ],
+
+    shared_libs: [
+        "libgui",
+        "liblog",
+        "libui",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
+
 cc_library {
     name: "libstagefright",
 
@@ -266,7 +302,6 @@
         "CallbackMediaSource.cpp",
         "CameraSource.cpp",
         "CameraSourceTimeLapse.cpp",
-        "DataConverter.cpp",
         "FrameDecoder.cpp",
         "HevcUtils.cpp",
         "InterfaceUtils.cpp",
@@ -340,6 +375,7 @@
         "android.hardware.media.omx@1.0",
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "packagemanager_aidl-cpp",
     ],
 
     static_libs: [
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 95afa62..9607425 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -564,9 +564,11 @@
     // Set the preview display. Skip this if mSurface is null because
     // applications may already set a surface to the camera.
     if (mSurface != NULL) {
-        // This CHECK is good, since we just passed the lock/unlock
-        // check earlier by calling mCamera->setParameters().
-        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
+        // The surface may be set incorrectly or already be in use, even though we just
+        // passed the lock/unlock check earlier by calling mCamera->setParameters().
+        if ((err = mCamera->setPreviewTarget(mSurface)) != OK) {
+            return err;
+        }
     }
 
     // Use buffer queue to receive video buffers from camera
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index a241e29..2d29853 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -35,6 +35,7 @@
 #include <media/stagefright/FrameCaptureProcessor.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
@@ -192,6 +193,13 @@
             *dstBpp = 4;
             return true;
         }
+        case HAL_PIXEL_FORMAT_RGBA_1010102:
+        {
+            *dstFormat = (OMX_COLOR_FORMATTYPE)COLOR_Format32bitABGR2101010;
+            *captureFormat = ui::PixelFormat::RGBA_1010102;
+            *dstBpp = 4;
+            return true;
+        }
         default:
         {
             ALOGE("Unsupported color format: %d", colorFormat);
@@ -262,13 +270,10 @@
 }
 
 bool isHDR(const sp<AMessage> &format) {
-    uint32_t standard, range, transfer;
+    uint32_t standard, transfer;
     if (!format->findInt32("color-standard", (int32_t*)&standard)) {
         standard = 0;
     }
-    if (!format->findInt32("color-range", (int32_t*)&range)) {
-        range = 0;
-    }
     if (!format->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
@@ -343,6 +348,10 @@
     status_t err = OK;
     bool done = false;
     size_t retriesLeft = kRetryCount;
+    if (!mDecoder) {
+        ALOGE("decoder is not initialized");
+        return NO_INIT;
+    }
     do {
         size_t index;
         int64_t ptsUs = 0LL;
@@ -526,8 +535,12 @@
         return NULL;
     }
 
-    // TODO: Use Flexible color instead
-    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+    if (dstFormat() == COLOR_Format32bitABGR2101010) {
+        videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
+    } else {
+        // TODO: Use Flexible color instead
+        videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+    }
 
     // For the thumbnail extraction case, try to allocate single buffer in both
     // input and output ports, if seeking to a sync frame. NOTE: This request may
@@ -635,6 +648,11 @@
         crop_bottom = height - 1;
     }
 
+    int32_t slice_height;
+    if (outputFormat->findInt32("slice-height", &slice_height) && slice_height > 0) {
+        height = slice_height;
+    }
+
     if (mFrame == NULL) {
         sp<IMemory> frameMem = allocVideoFrame(
                 trackMeta(),
@@ -834,8 +852,12 @@
         return NULL;
     }
 
-    // TODO: Use Flexible color instead
-    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+    if (dstFormat() == COLOR_Format32bitABGR2101010) {
+        videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
+    } else {
+        // TODO: Use Flexible color instead
+        videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+    }
 
     if ((mGridRows == 1) && (mGridCols == 1)) {
         videoFormat->setInt32("android._num-input-buffers", 1);
@@ -941,6 +963,11 @@
         crop_bottom = height - 1;
     }
 
+    int32_t slice_height;
+    if (outputFormat->findInt32("slice-height", &slice_height) && slice_height > 0) {
+        height = slice_height;
+    }
+
     int32_t crop_width, crop_height;
     crop_width = crop_right - crop_left + 1;
     crop_height = crop_bottom - crop_top + 1;
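The two slice-height hunks above exist because some decoders pad the luma plane to a "slice-height" larger than the display height, which shifts where the chroma planes begin in a planar YUV420 buffer. A standalone sketch of the plane-offset arithmetic under that assumption (plain C++, no Android types):

#include <cstdint>
#include <cstdio>

// For a planar YUV420 buffer, the chroma planes start after sliceHeight rows
// of luma (not after displayHeight rows), so offsets must be computed from
// the padded height the codec actually allocated.
struct PlaneOffsets { size_t y, u, v; };

PlaneOffsets yuv420PlaneOffsets(int stride, int displayHeight, int sliceHeight) {
    int bufHeight = (sliceHeight > 0) ? sliceHeight : displayHeight;
    size_t ySize  = static_cast<size_t>(stride) * bufHeight;
    size_t uvSize = static_cast<size_t>(stride / 2) * (bufHeight / 2);
    return { 0, ySize, ySize + uvSize };
}

int main() {
    PlaneOffsets p = yuv420PlaneOffsets(/*stride=*/1920, /*displayHeight=*/1080,
                                        /*sliceHeight=*/1088);
    std::printf("Y at %zu, U at %zu, V at %zu\n", p.y, p.u, p.v);
    return 0;
}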
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 7c7fcac..df4ff47 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -36,6 +36,7 @@
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ALookup.h>
 #include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/foundation/ByteUtils.h>
 #include <media/stagefright/foundation/ColorUtils.h>
@@ -44,6 +45,7 @@
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
 #include <media/mediarecorder.h>
@@ -156,7 +158,7 @@
     bool isHeic() const { return mIsHeic; }
     bool isAudio() const { return mIsAudio; }
     bool isMPEG4() const { return mIsMPEG4; }
-    bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic; }
+    bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic || mIsDovi; }
     bool isExifData(MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const;
     void addChunkOffset(off64_t offset);
     void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif);
@@ -164,6 +166,7 @@
     TrackId& getTrackId() { return mTrackId; }
     status_t dump(int fd, const Vector<String16>& args) const;
     static const char *getFourCCForMime(const char *mime);
+    const char *getDoviFourCC() const;
     const char *getTrackType() const;
     void resetInternal();
     int64_t trackMetaDataSize();
@@ -316,6 +319,7 @@
     volatile bool mStarted;
     bool mIsAvc;
     bool mIsHevc;
+    bool mIsDovi;
     bool mIsAudio;
     bool mIsVideo;
     bool mIsHeic;
@@ -370,6 +374,8 @@
     uint8_t mProfileCompatible;
     uint8_t mLevelIdc;
 
+    int32_t mDoviProfile;
+
     void *mCodecSpecificData;
     size_t mCodecSpecificDataSize;
     bool mGotAllCodecSpecificData;
@@ -422,6 +428,8 @@
     status_t parseHEVCCodecSpecificData(
             const uint8_t *data, size_t size, HevcParameterSets &paramSets);
 
+    status_t getDolbyVisionProfile();
+
     // Track authoring progress status
     void trackProgressStatus(int64_t timeUs, status_t err = OK);
     void initTrackingProgressStatus(MetaData *params);
@@ -459,6 +467,7 @@
     void writePaspBox();
     void writeAvccBox();
     void writeHvccBox();
+    void writeDoviConfigBox();
     void writeUrlBox();
     void writeDrefBox();
     void writeDinfBox();
@@ -617,6 +626,17 @@
     return OK;
 }
 
+const char *MPEG4Writer::Track::getDoviFourCC() const {
+    if (mDoviProfile == DolbyVisionProfileDvheStn) {
+        return "dvh1";
+    } else if (mDoviProfile == DolbyVisionProfileDvheSt) {
+        return "hvc1";
+    } else if (mDoviProfile == DolbyVisionProfileDvavSe) {
+        return "avc1";
+    }
+    return nullptr;
+}
+
 // static
 const char *MPEG4Writer::Track::getFourCCForMime(const char *mime) {
     if (mime == NULL) {
@@ -671,7 +691,14 @@
         mIsBackgroundMode |= isBackgroundMode;
     }
 
-    if (Track::getFourCCForMime(mime) == NULL) {
+    if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+        // For MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+        // getFourCCForMime() requires profile information
+        // to decide the final FourCC code.
+        // So we allow the new track to be created now and
+        // assign the FourCC code later using getDoviFourCC().
+        ALOGV("Add source mime '%s'", mime);
+    } else if (Track::getFourCCForMime(mime) == NULL) {
         ALOGE("Unsupported mime '%s'", mime);
         return ERROR_UNSUPPORTED;
     }
@@ -2150,6 +2177,7 @@
       mMinCttsOffsetTimeUs(0),
       mMinCttsOffsetTicks(0),
       mMaxCttsOffsetTicks(0),
+      mDoviProfile(0),
       mCodecSpecificData(NULL),
       mCodecSpecificDataSize(0),
       mGotAllCodecSpecificData(false),
@@ -2176,6 +2204,7 @@
     mMeta->findCString(kKeyMIMEType, &mime);
     mIsAvc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
     mIsHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+    mIsDovi = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION);
     mIsAudio = !strncasecmp(mime, "audio/", 6);
     mIsVideo = !strncasecmp(mime, "video/", 6);
     mIsHeic = !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
@@ -2610,7 +2639,12 @@
                !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
         mMeta->findData(kKeyHVCC, &type, &data, &size);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
-        mMeta->findData(kKeyDVCC, &type, &data, &size);
+        getDolbyVisionProfile();
+        if (!mMeta->findData(kKeyAVCC, &type, &data, &size) &&
+                !mMeta->findData(kKeyHVCC, &type, &data, &size)) {
+            ALOGE("Failed: No HVCC/AVCC for Dolby Vision ..\n");
+            return;
+        }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) ||
                !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
         if (mMeta->findData(kKeyESDS, &type, &data, &size)) {
@@ -2651,6 +2685,7 @@
         free(mCodecSpecificData);
         mCodecSpecificData = NULL;
     }
+
 }
 
 void MPEG4Writer::Track::initTrackingProgressStatus(MetaData *params) {
@@ -3329,6 +3364,40 @@
     return OK;
 }
 
+status_t MPEG4Writer::Track::getDolbyVisionProfile() {
+    uint32_t type;
+    const void *data = NULL;
+    size_t size = 0;
+
+    if (!mMeta->findData(kKeyDVCC, &type, &data, &size) &&
+        !mMeta->findData(kKeyDVVC, &type, &data, &size) &&
+        !mMeta->findData(kKeyDVWC, &type, &data, &size)) {
+            ALOGE("Failed getting Dovi config for Dolby Vision %d", (int)size);
+            return ERROR_MALFORMED;
+    }
+    static const ALookup<uint8_t, int32_t> dolbyVisionProfileMap = {
+        {1, DolbyVisionProfileDvavPen},
+        {3, DolbyVisionProfileDvheDen},
+        {4, DolbyVisionProfileDvheDtr},
+        {5, DolbyVisionProfileDvheStn},
+        {6, DolbyVisionProfileDvheDth},
+        {7, DolbyVisionProfileDvheDtb},
+        {8, DolbyVisionProfileDvheSt},
+        {9, DolbyVisionProfileDvavSe},
+        {10, DolbyVisionProfileDvav110}
+    };
+
+    // Dolby Vision profile information is extracted as per
+    // https://dolby.my.salesforce.com/sfc/p/#700000009YuG/a/4u000000l6FB/076wHYEmyEfz09m0V1bo85_25hlUJjaiWTbzorNmYY4
+    uint8_t dv_profile = ((((uint8_t *)data)[2] >> 1) & 0x7f);
+
+    if (!dolbyVisionProfileMap.map(dv_profile, &mDoviProfile)) {
+      ALOGE("Failed to get Dolby Profile from DV Config data");
+      return ERROR_MALFORMED;
+    }
+    return OK;
+}
+
 /*
  * Updates the drift time from the audio track so that
  * the video track can get the updated drift time information
@@ -3474,8 +3543,27 @@
                     err = copyCodecSpecificData((const uint8_t *)buffer->data() + buffer->range_offset(),
                             buffer->range_length());
                 }
+                if (mIsDovi) {
+                    err = getDolbyVisionProfile();
+                    if (err == OK) {
+                        const void *data = NULL;
+                        size_t size = 0;
+                        uint32_t type = 0;
+                        if (mDoviProfile == DolbyVisionProfileDvavSe) {
+                            mMeta->findData(kKeyAVCC, &type, &data, &size);
+                        } else if (mDoviProfile < DolbyVisionProfileDvavSe) {
+                            mMeta->findData(kKeyHVCC, &type, &data, &size);
+                        } else {
+                            ALOGW("DV Profiles > DolbyVisionProfileDvavSe are not supported");
+                            err = ERROR_MALFORMED;
+                        }
+                        if (err == OK && data != NULL &&
+                            copyCodecSpecificData((uint8_t *)data, size) == OK) {
+                                mGotAllCodecSpecificData = true;
+                        }
+                    }
+                }
             }
-
             buffer->release();
             buffer = NULL;
             if (OK != err) {
@@ -4173,6 +4261,7 @@
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime) ||
+        !strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime) ||
         !strcasecmp(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, mime)) {
         if (!mCodecSpecificData ||
             mCodecSpecificDataSize <= 0) {
@@ -4297,7 +4386,13 @@
     const char *mime;
     bool success = mMeta->findCString(kKeyMIMEType, &mime);
     CHECK(success);
-    const char *fourcc = getFourCCForMime(mime);
+    const char *fourcc;
+    if (!strcmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+        fourcc = getDoviFourCC();
+    } else {
+        fourcc = getFourCCForMime(mime);
+    }
+
     if (fourcc == NULL) {
         ALOGE("Unknown mime type '%s'.", mime);
         TRESPASS();
@@ -4337,6 +4432,15 @@
         writeAvccBox();
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
         writeHvccBox();
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, mime)) {
+        if (mDoviProfile <= DolbyVisionProfileDvheSt) {
+            writeHvccBox();
+        } else if (mDoviProfile == DolbyVisionProfileDvavSe) {
+            writeAvccBox();
+        } else {
+            TRESPASS("Unsupported Dolby Vision profile");
+        }
+        writeDoviConfigBox();
     }
 
     writePaspBox();
@@ -4349,30 +4453,31 @@
     memset(&aspects, 0, sizeof(aspects));
     // Color metadata may have changed.
     sp<MetaData> meta = mSource->getFormat();
-    // TRICKY: using | instead of || because we want to execute all findInt32-s
-    if (meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
-            | meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
-            | meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
-            | meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
-        int32_t primaries, transfer, coeffs;
-        bool fullRange;
-        ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
-                asString(aspects.mPrimaries),
-                asString(aspects.mTransfer),
-                asString(aspects.mMatrixCoeffs),
-                asString(aspects.mRange));
-        ColorUtils::convertCodecColorAspectsToIsoAspects(
-                aspects, &primaries, &transfer, &coeffs, &fullRange);
-        mOwner->beginBox("colr");
-        mOwner->writeFourcc("nclx");
-        mOwner->writeInt16(primaries);
-        mOwner->writeInt16(transfer);
-        mOwner->writeInt16(coeffs);
-        mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
-        mOwner->endBox(); // colr
-    } else {
+    bool findPrimaries = meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries);
+    bool findTransfer = meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer);
+    bool findMatrix = meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs);
+    bool findRange = meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange);
+    if (!findPrimaries && !findTransfer && !findMatrix && !findRange) {
         ALOGV("no color information");
+        return;
     }
+
+    int32_t primaries, transfer, coeffs;
+    bool fullRange;
+    ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
+            asString(aspects.mPrimaries),
+            asString(aspects.mTransfer),
+            asString(aspects.mMatrixCoeffs),
+            asString(aspects.mRange));
+    ColorUtils::convertCodecColorAspectsToIsoAspects(
+            aspects, &primaries, &transfer, &coeffs, &fullRange);
+    mOwner->beginBox("colr");
+    mOwner->writeFourcc("nclx");
+    mOwner->writeInt16(primaries);
+    mOwner->writeInt16(transfer);
+    mOwner->writeInt16(coeffs);
+    mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
+    mOwner->endBox(); // colr
 }
 
 void MPEG4Writer::Track::writeAudioFourCCBox() {
@@ -4829,12 +4934,11 @@
     mOwner->endBox();  // avcC
 }
 
-
 void MPEG4Writer::Track::writeHvccBox() {
     CHECK(mCodecSpecificData);
     CHECK_GE(mCodecSpecificDataSize, 5u);
 
-    // Patch avcc's lengthSize field to match the number
+    // Patch hvcc's lengthSize field to match the number
     // of bytes we use to indicate the size of a nal unit.
     uint8_t *ptr = (uint8_t *)mCodecSpecificData;
     ptr[21] = (ptr[21] & 0xfc) | (mOwner->useNalLengthFour() ? 3 : 1);
@@ -4843,6 +4947,32 @@
     mOwner->endBox();  // hvcC
 }
 
+void MPEG4Writer::Track::writeDoviConfigBox() {
+    CHECK_NE(mDoviProfile, 0u);
+
+    uint32_t type = 0;
+    const void *data = nullptr;
+    size_t size = 0;
+    // check to see which key has the configuration box.
+    if (mMeta->findData(kKeyDVCC, &type, &data, &size) ||
+        mMeta->findData(kKeyDVVC, &type, &data, &size) ||
+        mMeta->findData(kKeyDVWC, &type, &data, &size)) {
+
+        // If this box is present we write it; otherwise
+        // this mp4 will be interpreted as a backward
+        // compatible stream.
+        if (mDoviProfile > DolbyVisionProfileDvav110) {
+            mOwner->beginBox("dvwC");
+        } else if (mDoviProfile > DolbyVisionProfileDvheDtb) {
+            mOwner->beginBox("dvvC");
+        } else {
+            mOwner->beginBox("dvcC");
+        }
+        mOwner->write(data, size);
+        mOwner->endBox();  // dvwC/dvvC/dvcC
+    }
+}
+
 void MPEG4Writer::Track::writeD263Box() {
     mOwner->beginBox("d263");
     mOwner->writeInt32(0);  // vendor
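For the Dolby Vision support added above: getDolbyVisionProfile() reads the 7-bit dv_profile from byte 2 of the configuration record, and writeDoviConfigBox() then picks the dvcC/dvvC/dvwC box. A self-contained sketch of just that selection; the real code compares the mapped MediaCodec profile constants, but their ordering matches the raw 1-10 values used here for simplicity:

#include <cstdint>
#include <cstdio>

// dv_profile occupies the upper 7 bits of byte 2 of the DV config record.
static int dvProfileFromConfig(const uint8_t* cfg) {
    return (cfg[2] >> 1) & 0x7f;
}

// Same bucketing as writeDoviConfigBox() above, expressed on the raw profile
// number: profiles above 10 go to dvwC, above 7 to dvvC, otherwise dvcC.
static const char* dvConfigBoxName(int rawProfile) {
    if (rawProfile > 10) return "dvwC";
    if (rawProfile > 7)  return "dvvC";
    return "dvcC";
}

int main() {
    uint8_t cfg[24] = {1, 0, static_cast<uint8_t>(8 << 1)};  // dv_profile = 8
    int profile = dvProfileFromConfig(cfg);
    std::printf("dv_profile=%d -> %s box\n", profile, dvConfigBoxName(profile));
    return 0;
}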
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index da581fb..6f1974e 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -112,6 +112,13 @@
 static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
 static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
 static const char *kCodecPriority = "android.media.mediacodec.priority";
+static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
+static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
+static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
+static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
+static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
+static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
+static const char *kCodecHDRMetadataFlags = "android.media.mediacodec.hdr-metadata-flags";
 
 // Min/Max QP before shaping
 static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
@@ -748,6 +755,7 @@
       mVideoWidth(0),
       mVideoHeight(0),
       mRotationDegrees(0),
+      mHDRMetadataFlags(0),
       mDequeueInputTimeoutGeneration(0),
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
@@ -898,6 +906,8 @@
         mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
                               mIndexOfFirstFrameWhenLowLatencyOn);
     }
+
+    mediametrics_setInt32(mMetricsHandle, kCodecHDRMetadataFlags, mHDRMetadataFlags);
 #if 0
     // enable for short term, only while debugging
     updateEphemeralMediametrics(mMetricsHandle);
@@ -1408,9 +1418,14 @@
     if (mIsVideo) {
         // video codec needs dedicated looper
         if (mCodecLooper == NULL) {
+            status_t err = OK;
             mCodecLooper = new ALooper;
             mCodecLooper->setName("CodecLooper");
-            mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+            err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+            if (OK != err) {
+                ALOGE("Codec Looper failed to start");
+                return err;
+            }
         }
 
         mCodecLooper->registerHandler(mCodec);
@@ -1568,6 +1583,23 @@
             if (format->findInt32("priority", &priority)) {
                 mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
             }
+            int32_t colorStandard = -1;
+            if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorStandard, colorStandard);
+            }
+            int32_t colorRange = -1;
+            if (format->findInt32(KEY_COLOR_RANGE, &colorRange)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorRange, colorRange);
+            }
+            int32_t colorTransfer = -1;
+            if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
+            }
+            HDRStaticInfo info;
+            if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
+                    && ColorUtils::isHDRStaticInfoValid(&info)) {
+                mHDRMetadataFlags |= kFlagHDRStaticInfo;
+            }
         }
 
         // Prevent possible integer overflow in downstream code.
@@ -2977,8 +3009,9 @@
                     CHECK(msg->findInt32("err", &err));
                     CHECK(msg->findInt32("actionCode", &actionCode));
 
-                    ALOGE("Codec reported err %#x, actionCode %d, while in state %d/%s",
-                            err, actionCode, mState, stateString(mState).c_str());
+                    ALOGE("Codec reported err %#x/%s, actionCode %d, while in state %d/%s",
+                                              err, StrMediaError(err).c_str(), actionCode,
+                                              mState, stateString(mState).c_str());
                     if (err == DEAD_OBJECT) {
                         mFlags |= kFlagSawMediaServerDie;
                         mFlags &= ~kFlagIsComponentAllocated;
@@ -3034,10 +3067,8 @@
                         case STOPPING:
                         {
                             if (mFlags & kFlagSawMediaServerDie) {
-                                bool postPendingReplies = true;
                                 if (mState == RELEASING && !mReplyID) {
                                     ALOGD("Releasing asynchronously, so nothing to reply here.");
-                                    postPendingReplies = false;
                                 }
                                 // MediaServer died, there definitely won't
                                 // be a shutdown complete notification after
@@ -3050,8 +3081,11 @@
                                 if (mState == RELEASING) {
                                     mComponentName.clear();
                                 }
-                                if (postPendingReplies) {
+                                if (mReplyID) {
                                     postPendingRepliesAndDeferredMessages(origin + ":dead");
+                                } else {
+                                    ALOGD("no pending replies: %s:dead following %s",
+                                          origin.c_str(), mLastReplyOrigin.c_str());
                                 }
                                 sendErrorResponse = false;
                             } else if (!mReplyID) {
@@ -3187,8 +3221,11 @@
                         mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
                     }
 
-                    if (mIsVideo) {
-                        // audio codec is currently ignored.
+                    MediaCodecInfo::Attributes attr = mCodecInfo
+                            ? mCodecInfo->getAttributes()
+                            : MediaCodecInfo::Attributes(0);
+                    if (!(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
+                        // software codec is currently ignored.
                         mResourceManagerProxy->addResource(
                                 MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
                     }
@@ -3504,6 +3541,20 @@
 
                 case kWhatDrainThisBuffer:
                 {
+                    if ((mFlags & kFlagUseBlockModel) == 0 && mTunneled) {
+                        sp<RefBase> obj;
+                        CHECK(msg->findObject("buffer", &obj));
+                        sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
+                        if (mFlags & kFlagIsAsync) {
+                            // In asynchronous mode, output format change is processed immediately.
+                            handleOutputFormatChangeIfNeeded(buffer);
+                        } else {
+                            postActivityNotificationIfPossible();
+                        }
+                        mBufferChannel->discardBuffer(buffer);
+                        break;
+                    }
+
                     /* size_t index = */updateBuffers(kPortIndexOutput, msg);
 
                     if (mState == FLUSHING
@@ -4048,26 +4099,29 @@
                 break;
             }
 
-            if (asyncNotify != nullptr) {
-                if (mSurface != NULL) {
-                    if (!mReleaseSurface) {
-                        uint64_t usage = 0;
-                        if (mSurface->getConsumerUsage(&usage) != OK) {
-                            usage = 0;
-                        }
-                        mReleaseSurface.reset(new ReleaseSurface(usage));
+            bool forceSync = false;
+            if (asyncNotify != nullptr && mSurface != NULL) {
+                if (!mReleaseSurface) {
+                    uint64_t usage = 0;
+                    if (mSurface->getConsumerUsage(&usage) != OK) {
+                        usage = 0;
                     }
-                    if (mSurface != mReleaseSurface->getSurface()) {
-                        status_t err = connectToSurface(mReleaseSurface->getSurface());
-                        ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
-                        if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
-                            err = mCodec->setSurface(mReleaseSurface->getSurface());
-                            ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
-                        }
-                        if (err == OK) {
-                            (void)disconnectFromSurface();
-                            mSurface = mReleaseSurface->getSurface();
-                        }
+                    mReleaseSurface.reset(new ReleaseSurface(usage));
+                }
+                if (mSurface != mReleaseSurface->getSurface()) {
+                    status_t err = connectToSurface(mReleaseSurface->getSurface());
+                    ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+                    if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+                        err = mCodec->setSurface(mReleaseSurface->getSurface());
+                        ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+                    }
+                    if (err == OK) {
+                        (void)disconnectFromSurface();
+                        mSurface = mReleaseSurface->getSurface();
+                    } else {
+                        // We were not able to switch the surface, so force
+                        // synchronous release.
+                        forceSync = true;
                     }
                 }
             }
@@ -4091,8 +4145,10 @@
             }
 
             if (asyncNotify != nullptr) {
-                mResourceManagerProxy->markClientForPendingRemoval();
-                postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+                if (!forceSync) {
+                    mResourceManagerProxy->markClientForPendingRemoval();
+                    postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+                }
                 asyncNotifyPost.clear();
                 mAsyncReleaseCompleteNotification = asyncNotify;
             }
@@ -4501,6 +4557,9 @@
             HDRStaticInfo info;
             if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
                 setNativeWindowHdrMetadata(mSurface.get(), &info);
+                if (ColorUtils::isHDRStaticInfoValid(&info)) {
+                    mHDRMetadataFlags |= kFlagHDRStaticInfo;
+                }
             }
         }
 
@@ -4509,6 +4568,7 @@
                 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
             native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
                     hdr10PlusInfo->size(), hdr10PlusInfo->data());
+            mHDRMetadataFlags |= kFlagHDR10PlusInfo;
         }
 
         if (mime.startsWithIgnoreCase("video/")) {
@@ -4553,6 +4613,21 @@
             mCrypto->notifyResolution(width, height);
         }
     }
+
+    if (mMetricsHandle != 0) {
+        int32_t colorStandard = -1;
+        if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorStandard, colorStandard);
+        }
+        int32_t colorRange = -1;
+        if (format->findInt32( KEY_COLOR_RANGE, &colorRange)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorRange, colorRange);
+        }
+        int32_t colorTransfer = -1;
+        if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorTransfer, colorTransfer);
+        }
+    }
 }
 
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
@@ -4582,7 +4657,6 @@
     mCSD.erase(mCSD.begin());
     std::shared_ptr<C2Buffer> c2Buffer;
     sp<hardware::HidlMemory> memory;
-    size_t offset = 0;
 
     if (mFlags & kFlagUseBlockModel) {
         if (hasCryptoOrDescrambler()) {
@@ -4603,7 +4677,6 @@
             memcpy(mem->unsecurePointer(), csd->data(), csd->size());
             ssize_t heapOffset;
             memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
-            offset += heapOffset;
         } else {
             std::shared_ptr<C2LinearBlock> block =
                 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
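The MediaCodec hunks above accumulate a small bitmask of which HDR metadata was seen (valid static info at configure or output-format time, HDR10+ side data) and report it once to mediametrics at teardown. A compact sketch of that bookkeeping; the flag values below are assumptions, since the real constants are defined in MediaCodec.h and not shown in this diff:

#include <cstdint>
#include <cstdio>

// Assumed flag values; the real kFlagHDRStaticInfo/kFlagHDR10PlusInfo live in
// MediaCodec.h and are not part of this diff.
enum : uint32_t {
    kFlagHDRStaticInfo = 1u << 0,
    kFlagHDR10PlusInfo = 1u << 1,
};

struct CodecMetrics {
    uint32_t hdrMetadataFlags = 0;

    void onValidHdrStaticInfo() { hdrMetadataFlags |= kFlagHDRStaticInfo; }
    void onHdr10PlusSideData()  { hdrMetadataFlags |= kFlagHDR10PlusInfo; }

    // Flushed once when metrics are finalized, mirroring the
    // mediametrics_setInt32(..., kCodecHDRMetadataFlags, ...) call above.
    void flush() const {
        std::printf("hdr-metadata-flags=%u\n", static_cast<unsigned>(hdrMetadataFlags));
    }
};

int main() {
    CodecMetrics m;
    m.onValidHdrStaticInfo();
    m.onHdr10PlusSideData();
    m.flush();  // prints 3
    return 0;
}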
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 6243828..2ffe728 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -509,6 +509,29 @@
                 }
             }
         }
+
+        int32_t profile = -1;
+        if (format->findInt32("profile", &profile)) {
+            int32_t level = -1;
+            format->findInt32("level", &level);
+            Vector<MediaCodecInfo::ProfileLevel> profileLevels;
+            capabilities->getSupportedProfileLevels(&profileLevels);
+            auto it = profileLevels.begin();
+            for (; it != profileLevels.end(); ++it) {
+                if (profile != it->mProfile) {
+                    continue;
+                }
+                if (level > -1 && level > it->mLevel) {
+                    continue;
+                }
+                break;
+            }
+
+            if (it == profileLevels.end()) {
+                ALOGV("Codec does not support profile %d with level %d", profile, level);
+                return false;
+            }
+        }
     }
 
     // haven't found a reason to discard this one
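The profile/level filter added above rejects a codec only when the caller's format names a profile the codec does not advertise, or names a level higher than every advertised entry for that profile; a missing level means any level is acceptable. A self-contained sketch of the same rule over a plain vector of profile/level pairs:

#include <cstdint>
#include <vector>
#include <iostream>

struct ProfileLevel { int32_t profile; int32_t level; };

// Returns true if the codec advertises `profile` at `level` or higher.
// A level of -1 means "any level is fine", matching the behavior above when
// the format carries no "level" key.
bool supportsProfileLevel(const std::vector<ProfileLevel>& supported,
                          int32_t profile, int32_t level = -1) {
    for (const auto& pl : supported) {
        if (pl.profile != profile) continue;
        if (level > -1 && level > pl.level) continue;
        return true;
    }
    return false;
}

int main() {
    std::vector<ProfileLevel> caps = {{1, 256}, {2, 128}};  // illustrative values
    std::cout << supportsProfileLevel(caps, 2, 64)  << "\n";  // 1: level 64 <= 128
    std::cout << supportsProfileLevel(caps, 2, 512) << "\n";  // 0: level too high
    std::cout << supportsProfileLevel(caps, 8)      << "\n";  // 0: profile not listed
    return 0;
}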
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 0107c32..b07f8f7 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -943,10 +943,17 @@
 
             sp<MediaCodecBuffer> outbuf;
             status_t err = mEncoder->getOutputBuffer(index, &outbuf);
-            if (err != OK || outbuf == NULL || outbuf->data() == NULL
-                || outbuf->size() == 0) {
+            if (err != OK || outbuf == NULL || outbuf->data() == NULL) {
                 signalEOS();
                 break;
+            } else if (outbuf->size() == 0) {
+                // Zero length CSD buffers are not treated as an error
+                if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+                    mEncoder->releaseOutputBuffer(index);
+                } else {
+                    signalEOS();
+                }
+                break;
             }
 
             MediaBufferBase *mbuf = new MediaBuffer(outbuf->size());
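The MediaCodecSource hunk above stops treating an empty codec-config buffer as end-of-stream: such a buffer is simply released, while an empty regular buffer still signals EOS. A small sketch of that triage with hypothetical stand-ins for MediaCodecBuffer and the BUFFER_FLAG_CODECCONFIG flag:

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for MediaCodec::BUFFER_FLAG_CODECCONFIG.
constexpr uint32_t kFlagCodecConfig = 1u << 1;

enum class Action { kUseBuffer, kDropBuffer, kSignalEos };

Action classifyOutput(size_t size, bool valid, uint32_t flags) {
    if (!valid) {
        return Action::kSignalEos;  // error or null buffer: give up
    }
    if (size == 0) {
        // Some encoders emit an empty codec-config buffer; that is not fatal.
        return (flags & kFlagCodecConfig) ? Action::kDropBuffer
                                          : Action::kSignalEos;
    }
    return Action::kUseBuffer;
}

int main() {
    std::printf("%d\n", static_cast<int>(classifyOutput(0, true, kFlagCodecConfig)));  // drop
    std::printf("%d\n", static_cast<int>(classifyOutput(0, true, 0)));                 // eos
    return 0;
}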
diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp
index ee9016d..de91533 100644
--- a/media/libstagefright/SkipCutBuffer.cpp
+++ b/media/libstagefright/SkipCutBuffer.cpp
@@ -145,7 +145,19 @@
     if (available < num) {
         int32_t newcapacity = mCapacity + (num - available);
         char * newbuffer = new char[newcapacity];
-        memcpy(newbuffer, mCutBuffer, mCapacity);
+        if (mWriteHead < mReadHead) {
+            // Data isn't contiguous; need to memcpy twice
+            // to move the previous data to the new buffer.
+            size_t copyLeft = mCapacity - mReadHead;
+            memcpy(newbuffer, mCutBuffer + mReadHead, copyLeft);
+            memcpy(newbuffer + copyLeft, mCutBuffer, mWriteHead);
+            mReadHead = 0;
+            mWriteHead += copyLeft;
+        } else {
+            memcpy(newbuffer, mCutBuffer + mReadHead, mWriteHead - mReadHead);
+            mWriteHead -= mReadHead;
+            mReadHead = 0;
+        }
         delete [] mCutBuffer;
         mCapacity = newcapacity;
         mCutBuffer = newbuffer;
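The SkipCutBuffer change above matters because the internal buffer is a ring: when the data wraps past the end, a single memcpy from offset 0 (as before) would copy bytes in the wrong order. A standalone sketch of the wrap-aware copy during a capacity grow, using std::vector in place of the raw char array:

#include <cstring>
#include <vector>
#include <cstdio>

// Grow a ring buffer to `newCapacity`, preserving the readable region
// [readHead, writeHead) which may wrap around the end of the storage.
void growRing(std::vector<char>& buf, size_t& readHead, size_t& writeHead,
              size_t newCapacity) {
    std::vector<char> fresh(newCapacity);
    if (writeHead < readHead) {
        // Two segments: [readHead, capacity) then [0, writeHead).
        size_t tail = buf.size() - readHead;
        std::memcpy(fresh.data(), buf.data() + readHead, tail);
        std::memcpy(fresh.data() + tail, buf.data(), writeHead);
        writeHead += tail;
    } else {
        std::memcpy(fresh.data(), buf.data() + readHead, writeHead - readHead);
        writeHead -= readHead;
    }
    readHead = 0;
    buf.swap(fresh);
}

int main() {
    std::vector<char> ring = {'d', 'e', 'x', 'a', 'b', 'c'};  // wrapped payload "abcde"
    size_t r = 3, w = 2;
    growRing(ring, r, w, 10);
    std::printf("%.*s\n", static_cast<int>(w - r), ring.data() + r);  // prints "abcde"
    return 0;
}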
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 7ce2968..22885c9 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -13,7 +13,18 @@
 
   "presubmit-large": [
     {
-      "name": "CtsMediaTestCases",
+      "name": "CtsMediaMiscTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaAudioTestCases",
       "options": [
         {
           "include-annotation": "android.platform.test.annotations.Presubmit"
@@ -23,10 +34,54 @@
         },
         // TODO: b/149314419
         {
-          "exclude-filter": "android.media.cts.AudioPlaybackCaptureTest"
+          "exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
         },
         {
-          "exclude-filter": "android.media.cts.AudioRecordTest"
+          "exclude-filter": "android.media.audio.cts.AudioRecordTest"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaDecoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaEncoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaCodecTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
+        }
+      ]
+    },
+    {
+      "name": "CtsMediaPlayerTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        },
+        {
+          "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
         }
       ]
     }
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 4c18f87..4b6470a 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -28,9 +28,6 @@
 #include "include/HevcUtils.h"
 
 #include <cutils/properties.h>
-#include <media/openmax/OMX_Audio.h>
-#include <media/openmax/OMX_Video.h>
-#include <media/openmax/OMX_VideoExt.h>
 #include <media/stagefright/CodecBase.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -57,6 +54,14 @@
 #define AMEDIAFORMAT_KEY_MPEGH_COMPATIBLE_SETS \
   "mpegh-compatible-sets"
 
+namespace {
+    // TODO: this should possibly be handled in an else
+    constexpr static int32_t AACObjectNull = 0;
+
+    // TODO: decide if we should just not transmit the level in this case
+    constexpr static int32_t DolbyVisionLevelUnknown = 0;
+}
+
 namespace android {
 
 static status_t copyNALUToABuffer(sp<ABuffer> *buffer, const uint8_t *ptr, size_t length) {
@@ -156,21 +161,22 @@
         audioObjectType >>= 11;
     }
 
-    const static ALookup<uint16_t, OMX_AUDIO_AACPROFILETYPE> profiles {
-        { 1,  OMX_AUDIO_AACObjectMain     },
-        { 2,  OMX_AUDIO_AACObjectLC       },
-        { 3,  OMX_AUDIO_AACObjectSSR      },
-        { 4,  OMX_AUDIO_AACObjectLTP      },
-        { 5,  OMX_AUDIO_AACObjectHE       },
-        { 6,  OMX_AUDIO_AACObjectScalable },
-        { 17, OMX_AUDIO_AACObjectERLC     },
-        { 23, OMX_AUDIO_AACObjectLD       },
-        { 29, OMX_AUDIO_AACObjectHE_PS    },
-        { 39, OMX_AUDIO_AACObjectELD      },
-        { 42, OMX_AUDIO_AACObjectXHE      },
+
+    const static ALookup<uint16_t, int32_t> profiles {
+        { 1,  AACObjectMain     },
+        { 2,  AACObjectLC       },
+        { 3,  AACObjectSSR      },
+        { 4,  AACObjectLTP      },
+        { 5,  AACObjectHE       },
+        { 6,  AACObjectScalable },
+        { 17, AACObjectERLC     },
+        { 23, AACObjectLD       },
+        { 29, AACObjectHE_PS    },
+        { 39, AACObjectELD      },
+        { 42, AACObjectXHE      },
     };
 
-    OMX_AUDIO_AACPROFILETYPE profile;
+    int32_t profile;
     if (profiles.map(audioObjectType, &profile)) {
         format->setInt32("profile", profile);
     }
@@ -184,59 +190,92 @@
     const uint8_t constraints = ptr[2];
     const uint8_t level = ptr[3];
 
-    const static ALookup<uint8_t, OMX_VIDEO_AVCLEVELTYPE> levels {
-        {  9, OMX_VIDEO_AVCLevel1b }, // technically, 9 is only used for High+ profiles
-        { 10, OMX_VIDEO_AVCLevel1  },
-        { 11, OMX_VIDEO_AVCLevel11 }, // prefer level 1.1 for the value 11
-        { 11, OMX_VIDEO_AVCLevel1b },
-        { 12, OMX_VIDEO_AVCLevel12 },
-        { 13, OMX_VIDEO_AVCLevel13 },
-        { 20, OMX_VIDEO_AVCLevel2  },
-        { 21, OMX_VIDEO_AVCLevel21 },
-        { 22, OMX_VIDEO_AVCLevel22 },
-        { 30, OMX_VIDEO_AVCLevel3  },
-        { 31, OMX_VIDEO_AVCLevel31 },
-        { 32, OMX_VIDEO_AVCLevel32 },
-        { 40, OMX_VIDEO_AVCLevel4  },
-        { 41, OMX_VIDEO_AVCLevel41 },
-        { 42, OMX_VIDEO_AVCLevel42 },
-        { 50, OMX_VIDEO_AVCLevel5  },
-        { 51, OMX_VIDEO_AVCLevel51 },
-        { 52, OMX_VIDEO_AVCLevel52 },
-        { 60, OMX_VIDEO_AVCLevel6  },
-        { 61, OMX_VIDEO_AVCLevel61 },
-        { 62, OMX_VIDEO_AVCLevel62 },
+    const static ALookup<uint8_t, int32_t> levels {
+        {  9, AVCLevel1b }, // technically, 9 is only used for High+ profiles
+        { 10, AVCLevel1  },
+        { 11, AVCLevel11 }, // prefer level 1.1 for the value 11
+        { 11, AVCLevel1b },
+        { 12, AVCLevel12 },
+        { 13, AVCLevel13 },
+        { 20, AVCLevel2  },
+        { 21, AVCLevel21 },
+        { 22, AVCLevel22 },
+        { 30, AVCLevel3  },
+        { 31, AVCLevel31 },
+        { 32, AVCLevel32 },
+        { 40, AVCLevel4  },
+        { 41, AVCLevel41 },
+        { 42, AVCLevel42 },
+        { 50, AVCLevel5  },
+        { 51, AVCLevel51 },
+        { 52, AVCLevel52 },
+        { 60, AVCLevel6  },
+        { 61, AVCLevel61 },
+        { 62, AVCLevel62 },
     };
-    const static ALookup<uint8_t, OMX_VIDEO_AVCPROFILETYPE> profiles {
-        { 66, OMX_VIDEO_AVCProfileBaseline },
-        { 77, OMX_VIDEO_AVCProfileMain     },
-        { 88, OMX_VIDEO_AVCProfileExtended },
-        { 100, OMX_VIDEO_AVCProfileHigh    },
-        { 110, OMX_VIDEO_AVCProfileHigh10  },
-        { 122, OMX_VIDEO_AVCProfileHigh422 },
-        { 244, OMX_VIDEO_AVCProfileHigh444 },
+    const static ALookup<uint8_t, int32_t> profiles {
+        { 66, AVCProfileBaseline },
+        { 77, AVCProfileMain     },
+        { 88, AVCProfileExtended },
+        { 100, AVCProfileHigh    },
+        { 110, AVCProfileHigh10  },
+        { 122, AVCProfileHigh422 },
+        { 244, AVCProfileHigh444 },
     };
 
     // set profile & level if they are recognized
-    OMX_VIDEO_AVCPROFILETYPE codecProfile;
-    OMX_VIDEO_AVCLEVELTYPE codecLevel;
+    int32_t codecProfile;
+    int32_t codecLevel;
     if (profiles.map(profile, &codecProfile)) {
         if (profile == 66 && (constraints & 0x40)) {
-            codecProfile = (OMX_VIDEO_AVCPROFILETYPE)OMX_VIDEO_AVCProfileConstrainedBaseline;
+            codecProfile = AVCProfileConstrainedBaseline;
         } else if (profile == 100 && (constraints & 0x0C) == 0x0C) {
-            codecProfile = (OMX_VIDEO_AVCPROFILETYPE)OMX_VIDEO_AVCProfileConstrainedHigh;
+            codecProfile = AVCProfileConstrainedHigh;
         }
         format->setInt32("profile", codecProfile);
         if (levels.map(level, &codecLevel)) {
             // for 9 && 11 decide level based on profile and constraint_set3 flag
             if (level == 11 && (profile == 66 || profile == 77 || profile == 88)) {
-                codecLevel = (constraints & 0x10) ? OMX_VIDEO_AVCLevel1b : OMX_VIDEO_AVCLevel11;
+                codecLevel = (constraints & 0x10) ? AVCLevel1b : AVCLevel11;
             }
             format->setInt32("level", codecLevel);
         }
     }
 }
 
+static const ALookup<uint8_t, int32_t>&  getDolbyVisionProfileTable() {
+    static const ALookup<uint8_t, int32_t> profileTable = {
+        {1, DolbyVisionProfileDvavPen},
+        {3, DolbyVisionProfileDvheDen},
+        {4, DolbyVisionProfileDvheDtr},
+        {5, DolbyVisionProfileDvheStn},
+        {6, DolbyVisionProfileDvheDth},
+        {7, DolbyVisionProfileDvheDtb},
+        {8, DolbyVisionProfileDvheSt},
+        {9, DolbyVisionProfileDvavSe},
+        {10, DolbyVisionProfileDvav110},
+    };
+    return profileTable;
+}
+
+static const ALookup<uint8_t, int32_t>&  getDolbyVisionLevelsTable() {
+    static const ALookup<uint8_t, int32_t> levelsTable = {
+        {0, DolbyVisionLevelUnknown},
+        {1, DolbyVisionLevelHd24},
+        {2, DolbyVisionLevelHd30},
+        {3, DolbyVisionLevelFhd24},
+        {4, DolbyVisionLevelFhd30},
+        {5, DolbyVisionLevelFhd60},
+        {6, DolbyVisionLevelUhd24},
+        {7, DolbyVisionLevelUhd30},
+        {8, DolbyVisionLevelUhd48},
+        {9, DolbyVisionLevelUhd60},
+        {10, DolbyVisionLevelUhd120},
+        {11, DolbyVisionLevel8k30},
+        {12, DolbyVisionLevel8k60},
+    };
+    return levelsTable;
+}
 static void parseDolbyVisionProfileLevelFromDvcc(const uint8_t *ptr, size_t size, sp<AMessage> &format) {
     // dv_major.dv_minor Should be 1.0 or 2.1
     if (size != 24 || ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1))) {
@@ -256,41 +295,20 @@
 
     // All Dolby Profiles will have profile and level info in MediaFormat
     // Profile 8 and 9 will have bl_compatibility_id too.
-    const static ALookup<uint8_t, OMX_VIDEO_DOLBYVISIONPROFILETYPE> profiles{
-        {1, OMX_VIDEO_DolbyVisionProfileDvavPen},
-        {3, OMX_VIDEO_DolbyVisionProfileDvheDen},
-        {4, OMX_VIDEO_DolbyVisionProfileDvheDtr},
-        {5, OMX_VIDEO_DolbyVisionProfileDvheStn},
-        {6, OMX_VIDEO_DolbyVisionProfileDvheDth},
-        {7, OMX_VIDEO_DolbyVisionProfileDvheDtb},
-        {8, OMX_VIDEO_DolbyVisionProfileDvheSt},
-        {9, OMX_VIDEO_DolbyVisionProfileDvavSe},
-        {10, OMX_VIDEO_DolbyVisionProfileDvav110},
-    };
+    const ALookup<uint8_t, int32_t> &profiles = getDolbyVisionProfileTable();
+    const ALookup<uint8_t, int32_t> &levels = getDolbyVisionLevelsTable();
 
-    const static ALookup<uint8_t, OMX_VIDEO_DOLBYVISIONLEVELTYPE> levels{
-        {0, OMX_VIDEO_DolbyVisionLevelUnknown},
-        {1, OMX_VIDEO_DolbyVisionLevelHd24},
-        {2, OMX_VIDEO_DolbyVisionLevelHd30},
-        {3, OMX_VIDEO_DolbyVisionLevelFhd24},
-        {4, OMX_VIDEO_DolbyVisionLevelFhd30},
-        {5, OMX_VIDEO_DolbyVisionLevelFhd60},
-        {6, OMX_VIDEO_DolbyVisionLevelUhd24},
-        {7, OMX_VIDEO_DolbyVisionLevelUhd30},
-        {8, OMX_VIDEO_DolbyVisionLevelUhd48},
-        {9, OMX_VIDEO_DolbyVisionLevelUhd60},
-    };
     // set rpuAssoc
     if (rpu_present_flag && el_present_flag && !bl_present_flag) {
         format->setInt32("rpuAssoc", 1);
     }
     // set profile & level if they are recognized
-    OMX_VIDEO_DOLBYVISIONPROFILETYPE codecProfile;
-    OMX_VIDEO_DOLBYVISIONLEVELTYPE codecLevel;
+    int32_t codecProfile;
+    int32_t codecLevel;
     if (profiles.map(profile, &codecProfile)) {
         format->setInt32("profile", codecProfile);
-        if (codecProfile == OMX_VIDEO_DolbyVisionProfileDvheSt ||
-            codecProfile == OMX_VIDEO_DolbyVisionProfileDvavSe) {
+        if (codecProfile == DolbyVisionProfileDvheSt ||
+            codecProfile == DolbyVisionProfileDvavSe) {
             format->setInt32("bl_compatibility_id", bl_compatibility_id);
         }
         if (levels.map(level, &codecLevel)) {
@@ -307,32 +325,32 @@
     const uint8_t profile = ptr[6];
     const uint8_t level = ptr[5];
 
-    const static ALookup<uint8_t, OMX_VIDEO_H263PROFILETYPE> profiles {
-        { 0, OMX_VIDEO_H263ProfileBaseline },
-        { 1, OMX_VIDEO_H263ProfileH320Coding },
-        { 2, OMX_VIDEO_H263ProfileBackwardCompatible },
-        { 3, OMX_VIDEO_H263ProfileISWV2 },
-        { 4, OMX_VIDEO_H263ProfileISWV3 },
-        { 5, OMX_VIDEO_H263ProfileHighCompression },
-        { 6, OMX_VIDEO_H263ProfileInternet },
-        { 7, OMX_VIDEO_H263ProfileInterlace },
-        { 8, OMX_VIDEO_H263ProfileHighLatency },
+    const static ALookup<uint8_t, int32_t> profiles {
+        { 0, H263ProfileBaseline },
+        { 1, H263ProfileH320Coding },
+        { 2, H263ProfileBackwardCompatible },
+        { 3, H263ProfileISWV2 },
+        { 4, H263ProfileISWV3 },
+        { 5, H263ProfileHighCompression },
+        { 6, H263ProfileInternet },
+        { 7, H263ProfileInterlace },
+        { 8, H263ProfileHighLatency },
     };
 
-    const static ALookup<uint8_t, OMX_VIDEO_H263LEVELTYPE> levels {
-        { 10, OMX_VIDEO_H263Level10 },
-        { 20, OMX_VIDEO_H263Level20 },
-        { 30, OMX_VIDEO_H263Level30 },
-        { 40, OMX_VIDEO_H263Level40 },
-        { 45, OMX_VIDEO_H263Level45 },
-        { 50, OMX_VIDEO_H263Level50 },
-        { 60, OMX_VIDEO_H263Level60 },
-        { 70, OMX_VIDEO_H263Level70 },
+    const static ALookup<uint8_t, int32_t> levels {
+        { 10, H263Level10 },
+        { 20, H263Level20 },
+        { 30, H263Level30 },
+        { 40, H263Level40 },
+        { 45, H263Level45 },
+        { 50, H263Level50 },
+        { 60, H263Level60 },
+        { 70, H263Level70 },
     };
 
     // set profile & level if they are recognized
-    OMX_VIDEO_H263PROFILETYPE codecProfile;
-    OMX_VIDEO_H263LEVELTYPE codecLevel;
+    int32_t codecProfile;
+    int32_t codecLevel;
     if (profiles.map(profile, &codecProfile)) {
         format->setInt32("profile", codecProfile);
         if (levels.map(level, &codecLevel)) {
@@ -350,59 +368,59 @@
     const uint8_t tier = (ptr[1] & 0x20) >> 5;
     const uint8_t level = ptr[12];
 
-    const static ALookup<std::pair<uint8_t, uint8_t>, OMX_VIDEO_HEVCLEVELTYPE> levels {
-        { { 0, 30  }, OMX_VIDEO_HEVCMainTierLevel1  },
-        { { 0, 60  }, OMX_VIDEO_HEVCMainTierLevel2  },
-        { { 0, 63  }, OMX_VIDEO_HEVCMainTierLevel21 },
-        { { 0, 90  }, OMX_VIDEO_HEVCMainTierLevel3  },
-        { { 0, 93  }, OMX_VIDEO_HEVCMainTierLevel31 },
-        { { 0, 120 }, OMX_VIDEO_HEVCMainTierLevel4  },
-        { { 0, 123 }, OMX_VIDEO_HEVCMainTierLevel41 },
-        { { 0, 150 }, OMX_VIDEO_HEVCMainTierLevel5  },
-        { { 0, 153 }, OMX_VIDEO_HEVCMainTierLevel51 },
-        { { 0, 156 }, OMX_VIDEO_HEVCMainTierLevel52 },
-        { { 0, 180 }, OMX_VIDEO_HEVCMainTierLevel6  },
-        { { 0, 183 }, OMX_VIDEO_HEVCMainTierLevel61 },
-        { { 0, 186 }, OMX_VIDEO_HEVCMainTierLevel62 },
-        { { 1, 30  }, OMX_VIDEO_HEVCHighTierLevel1  },
-        { { 1, 60  }, OMX_VIDEO_HEVCHighTierLevel2  },
-        { { 1, 63  }, OMX_VIDEO_HEVCHighTierLevel21 },
-        { { 1, 90  }, OMX_VIDEO_HEVCHighTierLevel3  },
-        { { 1, 93  }, OMX_VIDEO_HEVCHighTierLevel31 },
-        { { 1, 120 }, OMX_VIDEO_HEVCHighTierLevel4  },
-        { { 1, 123 }, OMX_VIDEO_HEVCHighTierLevel41 },
-        { { 1, 150 }, OMX_VIDEO_HEVCHighTierLevel5  },
-        { { 1, 153 }, OMX_VIDEO_HEVCHighTierLevel51 },
-        { { 1, 156 }, OMX_VIDEO_HEVCHighTierLevel52 },
-        { { 1, 180 }, OMX_VIDEO_HEVCHighTierLevel6  },
-        { { 1, 183 }, OMX_VIDEO_HEVCHighTierLevel61 },
-        { { 1, 186 }, OMX_VIDEO_HEVCHighTierLevel62 },
+    const static ALookup<std::pair<uint8_t, uint8_t>, int32_t> levels {
+        { { 0, 30  }, HEVCMainTierLevel1  },
+        { { 0, 60  }, HEVCMainTierLevel2  },
+        { { 0, 63  }, HEVCMainTierLevel21 },
+        { { 0, 90  }, HEVCMainTierLevel3  },
+        { { 0, 93  }, HEVCMainTierLevel31 },
+        { { 0, 120 }, HEVCMainTierLevel4  },
+        { { 0, 123 }, HEVCMainTierLevel41 },
+        { { 0, 150 }, HEVCMainTierLevel5  },
+        { { 0, 153 }, HEVCMainTierLevel51 },
+        { { 0, 156 }, HEVCMainTierLevel52 },
+        { { 0, 180 }, HEVCMainTierLevel6  },
+        { { 0, 183 }, HEVCMainTierLevel61 },
+        { { 0, 186 }, HEVCMainTierLevel62 },
+        { { 1, 30  }, HEVCHighTierLevel1  },
+        { { 1, 60  }, HEVCHighTierLevel2  },
+        { { 1, 63  }, HEVCHighTierLevel21 },
+        { { 1, 90  }, HEVCHighTierLevel3  },
+        { { 1, 93  }, HEVCHighTierLevel31 },
+        { { 1, 120 }, HEVCHighTierLevel4  },
+        { { 1, 123 }, HEVCHighTierLevel41 },
+        { { 1, 150 }, HEVCHighTierLevel5  },
+        { { 1, 153 }, HEVCHighTierLevel51 },
+        { { 1, 156 }, HEVCHighTierLevel52 },
+        { { 1, 180 }, HEVCHighTierLevel6  },
+        { { 1, 183 }, HEVCHighTierLevel61 },
+        { { 1, 186 }, HEVCHighTierLevel62 },
     };
 
-    const static ALookup<uint8_t, OMX_VIDEO_HEVCPROFILETYPE> profiles {
-        { 1, OMX_VIDEO_HEVCProfileMain   },
-        { 2, OMX_VIDEO_HEVCProfileMain10 },
+    const static ALookup<uint8_t, int32_t> profiles {
+        { 1, HEVCProfileMain   },
+        { 2, HEVCProfileMain10 },
         // use Main for Main Still Picture decoding
-        { 3, OMX_VIDEO_HEVCProfileMain },
+        { 3, HEVCProfileMain },
     };
 
     // set profile & level if they are recognized
-    OMX_VIDEO_HEVCPROFILETYPE codecProfile;
-    OMX_VIDEO_HEVCLEVELTYPE codecLevel;
+    int32_t codecProfile;
+    int32_t codecLevel;
     if (!profiles.map(profile, &codecProfile)) {
         if (ptr[2] & 0x40 /* general compatibility flag 1 */) {
             // Note that this case covers Main Still Picture too
-            codecProfile = OMX_VIDEO_HEVCProfileMain;
+            codecProfile = HEVCProfileMain;
         } else if (ptr[2] & 0x20 /* general compatibility flag 2 */) {
-            codecProfile = OMX_VIDEO_HEVCProfileMain10;
+            codecProfile = HEVCProfileMain10;
         } else {
             return;
         }
     }
 
     // bump to HDR profile
-    if (isHdr(format) && codecProfile == OMX_VIDEO_HEVCProfileMain10) {
-        codecProfile = OMX_VIDEO_HEVCProfileMain10HDR10;
+    if (isHdr(format) && codecProfile == HEVCProfileMain10) {
+        codecProfile = HEVCProfileMain10HDR10;
     }
 
     format->setInt32("profile", codecProfile);
@@ -422,36 +440,36 @@
         }
         const uint8_t indication = ((seq[4] & 0xF) << 4) | ((seq[5] & 0xF0) >> 4);
 
-        const static ALookup<uint8_t, OMX_VIDEO_MPEG2PROFILETYPE> profiles {
-            { 0x50, OMX_VIDEO_MPEG2ProfileSimple  },
-            { 0x40, OMX_VIDEO_MPEG2ProfileMain    },
-            { 0x30, OMX_VIDEO_MPEG2ProfileSNR     },
-            { 0x20, OMX_VIDEO_MPEG2ProfileSpatial },
-            { 0x10, OMX_VIDEO_MPEG2ProfileHigh    },
+        const static ALookup<uint8_t, int32_t> profiles {
+            { 0x50, MPEG2ProfileSimple  },
+            { 0x40, MPEG2ProfileMain    },
+            { 0x30, MPEG2ProfileSNR     },
+            { 0x20, MPEG2ProfileSpatial },
+            { 0x10, MPEG2ProfileHigh    },
         };
 
-        const static ALookup<uint8_t, OMX_VIDEO_MPEG2LEVELTYPE> levels {
-            { 0x0A, OMX_VIDEO_MPEG2LevelLL  },
-            { 0x08, OMX_VIDEO_MPEG2LevelML  },
-            { 0x06, OMX_VIDEO_MPEG2LevelH14 },
-            { 0x04, OMX_VIDEO_MPEG2LevelHL  },
-            { 0x02, OMX_VIDEO_MPEG2LevelHP  },
+        const static ALookup<uint8_t, int32_t> levels {
+            { 0x0A, MPEG2LevelLL  },
+            { 0x08, MPEG2LevelML  },
+            { 0x06, MPEG2LevelH14 },
+            { 0x04, MPEG2LevelHL  },
+            { 0x02, MPEG2LevelHP  },
         };
 
         const static ALookup<uint8_t,
-                std::pair<OMX_VIDEO_MPEG2PROFILETYPE, OMX_VIDEO_MPEG2LEVELTYPE>> escapes {
+                std::pair<int32_t, int32_t>> escapes {
             /* unsupported
-            { 0x8E, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelLL  } },
-            { 0x8D, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelML  } },
-            { 0x8B, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelH14 } },
-            { 0x8A, { XXX_MPEG2ProfileMultiView, OMX_VIDEO_MPEG2LevelHL  } }, */
-            { 0x85, { OMX_VIDEO_MPEG2Profile422, OMX_VIDEO_MPEG2LevelML  } },
-            { 0x82, { OMX_VIDEO_MPEG2Profile422, OMX_VIDEO_MPEG2LevelHL  } },
+            { 0x8E, { XXX_MPEG2ProfileMultiView, MPEG2LevelLL  } },
+            { 0x8D, { XXX_MPEG2ProfileMultiView, MPEG2LevelML  } },
+            { 0x8B, { XXX_MPEG2ProfileMultiView, MPEG2LevelH14 } },
+            { 0x8A, { XXX_MPEG2ProfileMultiView, MPEG2LevelHL  } }, */
+            { 0x85, { MPEG2Profile422, MPEG2LevelML  } },
+            { 0x82, { MPEG2Profile422, MPEG2LevelHL  } },
         };
 
-        OMX_VIDEO_MPEG2PROFILETYPE profile;
-        OMX_VIDEO_MPEG2LEVELTYPE level;
-        std::pair<OMX_VIDEO_MPEG2PROFILETYPE, OMX_VIDEO_MPEG2LEVELTYPE> profileLevel;
+        int32_t profile;
+        int32_t level;
+        std::pair<int32_t, int32_t> profileLevel;
         if (escapes.map(indication, &profileLevel)) {
             format->setInt32("profile", profileLevel.first);
             format->setInt32("level", profileLevel.second);
@@ -468,16 +486,16 @@
     // esds seems to only contain the profile for MPEG-2
     uint8_t objType;
     if (esds.getObjectTypeIndication(&objType) == OK) {
-        const static ALookup<uint8_t, OMX_VIDEO_MPEG2PROFILETYPE> profiles{
-            { 0x60, OMX_VIDEO_MPEG2ProfileSimple  },
-            { 0x61, OMX_VIDEO_MPEG2ProfileMain    },
-            { 0x62, OMX_VIDEO_MPEG2ProfileSNR     },
-            { 0x63, OMX_VIDEO_MPEG2ProfileSpatial },
-            { 0x64, OMX_VIDEO_MPEG2ProfileHigh    },
-            { 0x65, OMX_VIDEO_MPEG2Profile422     },
+        const static ALookup<uint8_t, int32_t> profiles{
+            { 0x60, MPEG2ProfileSimple  },
+            { 0x61, MPEG2ProfileMain    },
+            { 0x62, MPEG2ProfileSNR     },
+            { 0x63, MPEG2ProfileSpatial },
+            { 0x64, MPEG2ProfileHigh    },
+            { 0x65, MPEG2Profile422     },
         };
 
-        OMX_VIDEO_MPEG2PROFILETYPE profile;
+        int32_t profile;
         if (profiles.map(objType, &profile)) {
             format->setInt32("profile", profile);
         }
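As a quick illustration of the esds mapping above, a hypothetical standalone helper (not part of the patch) that names the MPEG-2 profile for a given objectTypeIndication byte, using the same 0x60..0x65 values as the table:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical helper mirroring the table above.
    static const char *mpeg2ProfileName(uint8_t objType) {
        switch (objType) {
            case 0x60: return "Simple";
            case 0x61: return "Main";
            case 0x62: return "SNR";
            case 0x63: return "Spatial";
            case 0x64: return "High";
            case 0x65: return "422";
            default:   return "unknown";
        }
    }

    int main() {
        printf("objType 0x61 -> MPEG-2 %s profile\n", mpeg2ProfileName(0x61));
        return 0;
    }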
@@ -492,82 +510,82 @@
         const uint8_t indication = seq[4];
 
         const static ALookup<uint8_t,
-                std::pair<OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_MPEG4LEVELTYPE>> table {
-            { 0b00000001, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level1  } },
-            { 0b00000010, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level2  } },
-            { 0b00000011, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level3  } },
-            { 0b00000100, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level4a } },
-            { 0b00000101, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level5  } },
-            { 0b00000110, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level6  } },
-            { 0b00001000, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level0  } },
-            { 0b00001001, { OMX_VIDEO_MPEG4ProfileSimple,            OMX_VIDEO_MPEG4Level0b } },
-            { 0b00010000, { OMX_VIDEO_MPEG4ProfileSimpleScalable,    OMX_VIDEO_MPEG4Level0  } },
-            { 0b00010001, { OMX_VIDEO_MPEG4ProfileSimpleScalable,    OMX_VIDEO_MPEG4Level1  } },
-            { 0b00010010, { OMX_VIDEO_MPEG4ProfileSimpleScalable,    OMX_VIDEO_MPEG4Level2  } },
+                std::pair<int32_t, int32_t>> table {
+            { 0b00000001, { MPEG4ProfileSimple,            MPEG4Level1  } },
+            { 0b00000010, { MPEG4ProfileSimple,            MPEG4Level2  } },
+            { 0b00000011, { MPEG4ProfileSimple,            MPEG4Level3  } },
+            { 0b00000100, { MPEG4ProfileSimple,            MPEG4Level4a } },
+            { 0b00000101, { MPEG4ProfileSimple,            MPEG4Level5  } },
+            { 0b00000110, { MPEG4ProfileSimple,            MPEG4Level6  } },
+            { 0b00001000, { MPEG4ProfileSimple,            MPEG4Level0  } },
+            { 0b00001001, { MPEG4ProfileSimple,            MPEG4Level0b } },
+            { 0b00010000, { MPEG4ProfileSimpleScalable,    MPEG4Level0  } },
+            { 0b00010001, { MPEG4ProfileSimpleScalable,    MPEG4Level1  } },
+            { 0b00010010, { MPEG4ProfileSimpleScalable,    MPEG4Level2  } },
             /* unsupported
-            { 0b00011101, { XXX_MPEG4ProfileSimpleScalableER,        OMX_VIDEO_MPEG4Level0  } },
-            { 0b00011110, { XXX_MPEG4ProfileSimpleScalableER,        OMX_VIDEO_MPEG4Level1  } },
-            { 0b00011111, { XXX_MPEG4ProfileSimpleScalableER,        OMX_VIDEO_MPEG4Level2  } }, */
-            { 0b00100001, { OMX_VIDEO_MPEG4ProfileCore,              OMX_VIDEO_MPEG4Level1  } },
-            { 0b00100010, { OMX_VIDEO_MPEG4ProfileCore,              OMX_VIDEO_MPEG4Level2  } },
-            { 0b00110010, { OMX_VIDEO_MPEG4ProfileMain,              OMX_VIDEO_MPEG4Level2  } },
-            { 0b00110011, { OMX_VIDEO_MPEG4ProfileMain,              OMX_VIDEO_MPEG4Level3  } },
-            { 0b00110100, { OMX_VIDEO_MPEG4ProfileMain,              OMX_VIDEO_MPEG4Level4  } },
+            { 0b00011101, { XXX_MPEG4ProfileSimpleScalableER,        MPEG4Level0  } },
+            { 0b00011110, { XXX_MPEG4ProfileSimpleScalableER,        MPEG4Level1  } },
+            { 0b00011111, { XXX_MPEG4ProfileSimpleScalableER,        MPEG4Level2  } }, */
+            { 0b00100001, { MPEG4ProfileCore,              MPEG4Level1  } },
+            { 0b00100010, { MPEG4ProfileCore,              MPEG4Level2  } },
+            { 0b00110010, { MPEG4ProfileMain,              MPEG4Level2  } },
+            { 0b00110011, { MPEG4ProfileMain,              MPEG4Level3  } },
+            { 0b00110100, { MPEG4ProfileMain,              MPEG4Level4  } },
             /* deprecated
-            { 0b01000010, { OMX_VIDEO_MPEG4ProfileNbit,              OMX_VIDEO_MPEG4Level2  } }, */
-            { 0b01010001, { OMX_VIDEO_MPEG4ProfileScalableTexture,   OMX_VIDEO_MPEG4Level1  } },
-            { 0b01100001, { OMX_VIDEO_MPEG4ProfileSimpleFace,        OMX_VIDEO_MPEG4Level1  } },
-            { 0b01100010, { OMX_VIDEO_MPEG4ProfileSimpleFace,        OMX_VIDEO_MPEG4Level2  } },
-            { 0b01100011, { OMX_VIDEO_MPEG4ProfileSimpleFBA,         OMX_VIDEO_MPEG4Level1  } },
-            { 0b01100100, { OMX_VIDEO_MPEG4ProfileSimpleFBA,         OMX_VIDEO_MPEG4Level2  } },
-            { 0b01110001, { OMX_VIDEO_MPEG4ProfileBasicAnimated,     OMX_VIDEO_MPEG4Level1  } },
-            { 0b01110010, { OMX_VIDEO_MPEG4ProfileBasicAnimated,     OMX_VIDEO_MPEG4Level2  } },
-            { 0b10000001, { OMX_VIDEO_MPEG4ProfileHybrid,            OMX_VIDEO_MPEG4Level1  } },
-            { 0b10000010, { OMX_VIDEO_MPEG4ProfileHybrid,            OMX_VIDEO_MPEG4Level2  } },
-            { 0b10010001, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level1  } },
-            { 0b10010010, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level2  } },
-            { 0b10010011, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level3  } },
-            { 0b10010100, { OMX_VIDEO_MPEG4ProfileAdvancedRealTime,  OMX_VIDEO_MPEG4Level4  } },
-            { 0b10100001, { OMX_VIDEO_MPEG4ProfileCoreScalable,      OMX_VIDEO_MPEG4Level1  } },
-            { 0b10100010, { OMX_VIDEO_MPEG4ProfileCoreScalable,      OMX_VIDEO_MPEG4Level2  } },
-            { 0b10100011, { OMX_VIDEO_MPEG4ProfileCoreScalable,      OMX_VIDEO_MPEG4Level3  } },
-            { 0b10110001, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level1  } },
-            { 0b10110010, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level2  } },
-            { 0b10110011, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level3  } },
-            { 0b10110100, { OMX_VIDEO_MPEG4ProfileAdvancedCoding,    OMX_VIDEO_MPEG4Level4  } },
-            { 0b11000001, { OMX_VIDEO_MPEG4ProfileAdvancedCore,      OMX_VIDEO_MPEG4Level1  } },
-            { 0b11000010, { OMX_VIDEO_MPEG4ProfileAdvancedCore,      OMX_VIDEO_MPEG4Level2  } },
-            { 0b11010001, { OMX_VIDEO_MPEG4ProfileAdvancedScalable,  OMX_VIDEO_MPEG4Level1  } },
-            { 0b11010010, { OMX_VIDEO_MPEG4ProfileAdvancedScalable,  OMX_VIDEO_MPEG4Level2  } },
-            { 0b11010011, { OMX_VIDEO_MPEG4ProfileAdvancedScalable,  OMX_VIDEO_MPEG4Level3  } },
+            { 0b01000010, { MPEG4ProfileNbit,              MPEG4Level2  } }, */
+            { 0b01010001, { MPEG4ProfileScalableTexture,   MPEG4Level1  } },
+            { 0b01100001, { MPEG4ProfileSimpleFace,        MPEG4Level1  } },
+            { 0b01100010, { MPEG4ProfileSimpleFace,        MPEG4Level2  } },
+            { 0b01100011, { MPEG4ProfileSimpleFBA,         MPEG4Level1  } },
+            { 0b01100100, { MPEG4ProfileSimpleFBA,         MPEG4Level2  } },
+            { 0b01110001, { MPEG4ProfileBasicAnimated,     MPEG4Level1  } },
+            { 0b01110010, { MPEG4ProfileBasicAnimated,     MPEG4Level2  } },
+            { 0b10000001, { MPEG4ProfileHybrid,            MPEG4Level1  } },
+            { 0b10000010, { MPEG4ProfileHybrid,            MPEG4Level2  } },
+            { 0b10010001, { MPEG4ProfileAdvancedRealTime,  MPEG4Level1  } },
+            { 0b10010010, { MPEG4ProfileAdvancedRealTime,  MPEG4Level2  } },
+            { 0b10010011, { MPEG4ProfileAdvancedRealTime,  MPEG4Level3  } },
+            { 0b10010100, { MPEG4ProfileAdvancedRealTime,  MPEG4Level4  } },
+            { 0b10100001, { MPEG4ProfileCoreScalable,      MPEG4Level1  } },
+            { 0b10100010, { MPEG4ProfileCoreScalable,      MPEG4Level2  } },
+            { 0b10100011, { MPEG4ProfileCoreScalable,      MPEG4Level3  } },
+            { 0b10110001, { MPEG4ProfileAdvancedCoding,    MPEG4Level1  } },
+            { 0b10110010, { MPEG4ProfileAdvancedCoding,    MPEG4Level2  } },
+            { 0b10110011, { MPEG4ProfileAdvancedCoding,    MPEG4Level3  } },
+            { 0b10110100, { MPEG4ProfileAdvancedCoding,    MPEG4Level4  } },
+            { 0b11000001, { MPEG4ProfileAdvancedCore,      MPEG4Level1  } },
+            { 0b11000010, { MPEG4ProfileAdvancedCore,      MPEG4Level2  } },
+            { 0b11010001, { MPEG4ProfileAdvancedScalable,  MPEG4Level1  } },
+            { 0b11010010, { MPEG4ProfileAdvancedScalable,  MPEG4Level2  } },
+            { 0b11010011, { MPEG4ProfileAdvancedScalable,  MPEG4Level3  } },
             /* unsupported
-            { 0b11100001, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level1  } },
-            { 0b11100010, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level2  } },
-            { 0b11100011, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level3  } },
-            { 0b11100100, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level4  } },
-            { 0b11100101, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level1  } },
-            { 0b11100110, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level2  } },
-            { 0b11100111, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level3  } },
-            { 0b11101000, { XXX_MPEG4ProfileCoreStudio,              OMX_VIDEO_MPEG4Level4  } },
-            { 0b11101011, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level5  } },
-            { 0b11101100, { XXX_MPEG4ProfileSimpleStudio,            OMX_VIDEO_MPEG4Level6  } }, */
-            { 0b11110000, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level0  } },
-            { 0b11110001, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level1  } },
-            { 0b11110010, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level2  } },
-            { 0b11110011, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level3  } },
-            { 0b11110100, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level4  } },
-            { 0b11110101, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level5  } },
-            { 0b11110111, { OMX_VIDEO_MPEG4ProfileAdvancedSimple,    OMX_VIDEO_MPEG4Level3b } },
+            { 0b11100001, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level1  } },
+            { 0b11100010, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level2  } },
+            { 0b11100011, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level3  } },
+            { 0b11100100, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level4  } },
+            { 0b11100101, { XXX_MPEG4ProfileCoreStudio,              MPEG4Level1  } },
+            { 0b11100110, { XXX_MPEG4ProfileCoreStudio,              MPEG4Level2  } },
+            { 0b11100111, { XXX_MPEG4ProfileCoreStudio,              MPEG4Level3  } },
+            { 0b11101000, { XXX_MPEG4ProfileCoreStudio,              MPEG4Level4  } },
+            { 0b11101011, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level5  } },
+            { 0b11101100, { XXX_MPEG4ProfileSimpleStudio,            MPEG4Level6  } }, */
+            { 0b11110000, { MPEG4ProfileAdvancedSimple,    MPEG4Level0  } },
+            { 0b11110001, { MPEG4ProfileAdvancedSimple,    MPEG4Level1  } },
+            { 0b11110010, { MPEG4ProfileAdvancedSimple,    MPEG4Level2  } },
+            { 0b11110011, { MPEG4ProfileAdvancedSimple,    MPEG4Level3  } },
+            { 0b11110100, { MPEG4ProfileAdvancedSimple,    MPEG4Level4  } },
+            { 0b11110101, { MPEG4ProfileAdvancedSimple,    MPEG4Level5  } },
+            { 0b11110111, { MPEG4ProfileAdvancedSimple,    MPEG4Level3b } },
             /* deprecated
-            { 0b11111000, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level0  } },
-            { 0b11111001, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level1  } },
-            { 0b11111010, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level2  } },
-            { 0b11111011, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level3  } },
-            { 0b11111100, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level4  } },
-            { 0b11111101, { XXX_MPEG4ProfileFineGranularityScalable, OMX_VIDEO_MPEG4Level5  } }, */
+            { 0b11111000, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level0  } },
+            { 0b11111001, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level1  } },
+            { 0b11111010, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level2  } },
+            { 0b11111011, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level3  } },
+            { 0b11111100, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level4  } },
+            { 0b11111101, { XXX_MPEG4ProfileFineGranularityScalable, MPEG4Level5  } }, */
         };
 
-        std::pair<OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_MPEG4LEVELTYPE> profileLevel;
+        std::pair<int32_t, int32_t> profileLevel;
         if (table.map(indication, &profileLevel)) {
             format->setInt32("profile", profileLevel.first);
             format->setInt32("level", profileLevel.second);
@@ -590,19 +608,19 @@
         switch (id) {
             case 1 /* profileId */:
                 if (length >= 1) {
-                    const static ALookup<uint8_t, OMX_VIDEO_VP9PROFILETYPE> profiles {
-                        { 0, OMX_VIDEO_VP9Profile0 },
-                        { 1, OMX_VIDEO_VP9Profile1 },
-                        { 2, OMX_VIDEO_VP9Profile2 },
-                        { 3, OMX_VIDEO_VP9Profile3 },
+                    const static ALookup<uint8_t, int32_t> profiles {
+                        { 0, VP9Profile0 },
+                        { 1, VP9Profile1 },
+                        { 2, VP9Profile2 },
+                        { 3, VP9Profile3 },
                     };
 
-                    const static ALookup<OMX_VIDEO_VP9PROFILETYPE, OMX_VIDEO_VP9PROFILETYPE> toHdr {
-                        { OMX_VIDEO_VP9Profile2, OMX_VIDEO_VP9Profile2HDR },
-                        { OMX_VIDEO_VP9Profile3, OMX_VIDEO_VP9Profile3HDR },
+                    const static ALookup<int32_t, int32_t> toHdr {
+                        { VP9Profile2, VP9Profile2HDR },
+                        { VP9Profile3, VP9Profile3HDR },
                     };
 
-                    OMX_VIDEO_VP9PROFILETYPE profile;
+                    int32_t profile;
                     if (profiles.map(data[0], &profile)) {
                         // convert to HDR profile
                         if (isHdr(format)) {
@@ -615,24 +633,24 @@
                 break;
             case 2 /* levelId */:
                 if (length >= 1) {
-                    const static ALookup<uint8_t, OMX_VIDEO_VP9LEVELTYPE> levels {
-                        { 10, OMX_VIDEO_VP9Level1  },
-                        { 11, OMX_VIDEO_VP9Level11 },
-                        { 20, OMX_VIDEO_VP9Level2  },
-                        { 21, OMX_VIDEO_VP9Level21 },
-                        { 30, OMX_VIDEO_VP9Level3  },
-                        { 31, OMX_VIDEO_VP9Level31 },
-                        { 40, OMX_VIDEO_VP9Level4  },
-                        { 41, OMX_VIDEO_VP9Level41 },
-                        { 50, OMX_VIDEO_VP9Level5  },
-                        { 51, OMX_VIDEO_VP9Level51 },
-                        { 52, OMX_VIDEO_VP9Level52 },
-                        { 60, OMX_VIDEO_VP9Level6  },
-                        { 61, OMX_VIDEO_VP9Level61 },
-                        { 62, OMX_VIDEO_VP9Level62 },
+                    const static ALookup<uint8_t, int32_t> levels {
+                        { 10, VP9Level1  },
+                        { 11, VP9Level11 },
+                        { 20, VP9Level2  },
+                        { 21, VP9Level21 },
+                        { 30, VP9Level3  },
+                        { 31, VP9Level31 },
+                        { 40, VP9Level4  },
+                        { 41, VP9Level41 },
+                        { 50, VP9Level5  },
+                        { 51, VP9Level51 },
+                        { 52, VP9Level52 },
+                        { 60, VP9Level6  },
+                        { 61, VP9Level61 },
+                        { 62, VP9Level62 },
                     };
 
-                    OMX_VIDEO_VP9LEVELTYPE level;
+                    int32_t level;
                     if (levels.map(data[0], &level)) {
                         format->setInt32("level", level);
                     }
@@ -1504,10 +1522,21 @@
         msg->setBuffer("csd-0", buffer);
     }
 
-    if (meta->findData(kKeyDVCC, &type, &data, &size)) {
+    if (meta->findData(kKeyDVCC, &type, &data, &size)
+            || meta->findData(kKeyDVVC, &type, &data, &size)
+            || meta->findData(kKeyDVWC, &type, &data, &size)) {
         const uint8_t *ptr = (const uint8_t *)data;
         ALOGV("DV: calling parseDolbyVisionProfileLevelFromDvcc with data size %zu", size);
         parseDolbyVisionProfileLevelFromDvcc(ptr, size, msg);
+        sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
+        if (buffer.get() == nullptr || buffer->base() == nullptr) {
+            return NO_MEMORY;
+        }
+        memcpy(buffer->data(), data, size);
+
+        buffer->meta()->setInt32("csd", true);
+        buffer->meta()->setInt64("timeUs", 0);
+        msg->setBuffer("csd-2", buffer);
     }
 
     *format = msg;
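The 24-byte payload copied into csd-2 here is the DolbyVisionConfiguration box body. A small sketch (hypothetical values) of how profile and level are unpacked from its first bytes, the inverse of the packing done further down in this patch:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Hypothetical 24-byte DolbyVisionConfiguration payload; only the
        // first bytes matter for profile/level. Packing matches this patch:
        // profile in dvcc[2] >> 1, level split across dvcc[2]/dvcc[3].
        uint8_t dvcc[24] = {};
        dvcc[0] = 1;                        // dv_major_version
        dvcc[2] = (8 << 1) | 0;             // dv_profile = 8, high level bit = 0
        dvcc[3] = (5 << 3) | (1 << 2) | 1;  // dv_level = 5, rpu_present, bl_present

        const int profile = dvcc[2] >> 1;
        const int level   = ((dvcc[2] & 0x1) << 5) | (dvcc[3] >> 3);
        printf("dv_profile=%d dv_level=%d\n", profile, level);
        return 0;
    }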
@@ -1759,24 +1788,39 @@
     if (mime.startsWith("video/") || mime.startsWith("image/")) {
         int32_t width;
         int32_t height;
-        if (msg->findInt32("width", &width) && msg->findInt32("height", &height)) {
-            meta->setInt32(kKeyWidth, width);
-            meta->setInt32(kKeyHeight, height);
-        } else {
+        if (!msg->findInt32("width", &width) || !msg->findInt32("height", &height)) {
             ALOGV("did not find width and/or height");
             return BAD_VALUE;
         }
+        if (width <= 0 || height <= 0) {
+            ALOGE("Invalid value of width: %d and/or height: %d", width, height);
+            return BAD_VALUE;
+        }
+        meta->setInt32(kKeyWidth, width);
+        meta->setInt32(kKeyHeight, height);
 
-        int32_t sarWidth, sarHeight;
-        if (msg->findInt32("sar-width", &sarWidth)
-                && msg->findInt32("sar-height", &sarHeight)) {
+        int32_t sarWidth = -1, sarHeight = -1;
+        bool foundWidth, foundHeight;
+        foundWidth = msg->findInt32("sar-width", &sarWidth);
+        foundHeight = msg->findInt32("sar-height", &sarHeight);
+        if (foundWidth || foundHeight) {
+            if (sarWidth <= 0 || sarHeight <= 0) {
+                ALOGE("Invalid value of sarWidth: %d and/or sarHeight: %d", sarWidth, sarHeight);
+                return BAD_VALUE;
+            }
             meta->setInt32(kKeySARWidth, sarWidth);
             meta->setInt32(kKeySARHeight, sarHeight);
         }
 
-        int32_t displayWidth, displayHeight;
-        if (msg->findInt32("display-width", &displayWidth)
-                && msg->findInt32("display-height", &displayHeight)) {
+        int32_t displayWidth = -1, displayHeight = -1;
+        foundWidth = msg->findInt32("display-width", &displayWidth);
+        foundHeight = msg->findInt32("display-height", &displayHeight);
+        if (foundWidth || foundHeight) {
+            if (displayWidth <= 0 || displayHeight <= 0) {
+                ALOGE("Invalid value of displayWidth: %d and/or displayHeight: %d",
+                        displayWidth, displayHeight);
+                return BAD_VALUE;
+            }
             meta->setInt32(kKeyDisplayWidth, displayWidth);
             meta->setInt32(kKeyDisplayHeight, displayHeight);
         }
@@ -1786,17 +1830,29 @@
             if (msg->findInt32("is-default", &isPrimary) && isPrimary) {
                 meta->setInt32(kKeyTrackIsDefault, 1);
             }
-            int32_t tileWidth, tileHeight, gridRows, gridCols;
-            if (msg->findInt32("tile-width", &tileWidth)) {
+            int32_t tileWidth = -1, tileHeight = -1;
+            foundWidth = msg->findInt32("tile-width", &tileWidth);
+            foundHeight = msg->findInt32("tile-height", &tileHeight);
+            if (foundWidth || foundHeight) {
+                if (tileWidth <= 0 || tileHeight <= 0) {
+                    ALOGE("Invalid value of tileWidth: %d and/or tileHeight: %d",
+                            tileWidth, tileHeight);
+                    return BAD_VALUE;
+                }
                 meta->setInt32(kKeyTileWidth, tileWidth);
-            }
-            if (msg->findInt32("tile-height", &tileHeight)) {
                 meta->setInt32(kKeyTileHeight, tileHeight);
             }
-            if (msg->findInt32("grid-rows", &gridRows)) {
+            int32_t gridRows = -1, gridCols = -1;
+            bool foundRows, foundCols;
+            foundRows = msg->findInt32("grid-rows", &gridRows);
+            foundCols = msg->findInt32("grid-cols", &gridCols);
+            if (foundRows || foundCols) {
+                if (gridRows <= 0 || gridCols <= 0) {
+                    ALOGE("Invalid value of gridRows: %d and/or gridCols: %d",
+                            gridRows, gridCols);
+                    return BAD_VALUE;
+                }
                 meta->setInt32(kKeyGridRows, gridRows);
-            }
-            if (msg->findInt32("grid-cols", &gridCols)) {
                 meta->setInt32(kKeyGridCols, gridCols);
             }
         }
@@ -1812,6 +1868,14 @@
                           &cropTop,
                           &cropRight,
                           &cropBottom)) {
+            if (cropLeft < 0 || cropLeft > cropRight || cropRight >= width) {
+                ALOGE("Invalid value of cropLeft: %d and/or cropRight: %d", cropLeft, cropRight);
+                return BAD_VALUE;
+            }
+            if (cropTop < 0 || cropTop > cropBottom || cropBottom >= height) {
+                ALOGE("Invalid value of cropTop: %d and/or cropBottom: %d", cropTop, cropBottom);
+                return BAD_VALUE;
+            }
             meta->setRect(kKeyCropRect, cropLeft, cropTop, cropRight, cropBottom);
         }
 
@@ -1855,9 +1919,16 @@
             ALOGV("did not find channel-count and/or sample-rate");
             return BAD_VALUE;
         }
+        // channel count can be zero in some cases, like MPEG-H
+        if (sampleRate <= 0 || numChannels < 0) {
+            ALOGE("Invalid value of channel-count: %d and/or sample-rate: %d",
+                   numChannels, sampleRate);
+            return BAD_VALUE;
+        }
         meta->setInt32(kKeyChannelCount, numChannels);
         meta->setInt32(kKeySampleRate, sampleRate);
         int32_t bitsPerSample;
+        // TODO:(b/204430952) add appropriate bound check for bitsPerSample
         if (msg->findInt32("bits-per-sample", &bitsPerSample)) {
             meta->setInt32(kKeyBitsPerSample, bitsPerSample);
         }
@@ -1967,30 +2038,147 @@
                    mime == MEDIA_MIMETYPE_IMAGE_AVIF) {
             meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
         } else if (mime == MEDIA_MIMETYPE_VIDEO_DOLBY_VISION) {
-            if (msg->findBuffer("csd-2", &csd2)) {
-                //dvcc should be 24
-                if (csd2->size() == 24) {
-                    meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
-                    uint8_t *dvcc = csd2->data();
-                    const uint8_t profile = dvcc[2] >> 1;
-                    if (profile > 1 && profile < 9) {
-                        std::vector<uint8_t> hvcc(csd0size + 1024);
-                        size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
-                        meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
-                    } else if (DolbyVisionProfileDvav110 == profile) {
-                        meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
-                    } else {
-                        sp<ABuffer> csd1;
-                        if (msg->findBuffer("csd-1", &csd1)) {
-                            std::vector<char> avcc(csd0size + csd1->size() + 1024);
-                            size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
-                            meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
-                        }
-                    }
+            int32_t profile = -1;
+            uint8_t blCompatibilityId = -1;
+            int32_t level = 0;
+            uint8_t profileVal = -1;
+            uint8_t profileVal1 = -1;
+            uint8_t profileVal2 = -1;
+            constexpr size_t dvccSize = 24;
+
+            const ALookup<uint8_t, int32_t> &profiles =
+                getDolbyVisionProfileTable();
+            const ALookup<uint8_t, int32_t> &levels =
+                getDolbyVisionLevelsTable();
+
+            if (!msg->findBuffer("csd-2", &csd2)) {
+                // MP4 extractors are expected to generate the csd buffer,
+                // but some encoders might not generate it. In that case we
+                // populate the track metadata dv(cc|vc|wc) box from the
+                // 'profile' and 'level' info, as described in the
+                // Dolby Vision ISOBMFF specification.
+
+                if (!msg->findInt32("profile", &profile)) {
+                    ALOGE("Dolby Vision profile not found");
+                    return BAD_VALUE;
                 }
+                msg->findInt32("level", &level);
+
+                if (profile == DolbyVisionProfileDvheSt) {
+                    if (!profiles.rlookup(DolbyVisionProfileDvheSt, &profileVal)) { // dvhe.08
+                        ALOGE("Dolby Vision profile lookup error");
+                        return BAD_VALUE;
+                    }
+                    blCompatibilityId = 4;
+                } else if (profile == DolbyVisionProfileDvavSe) {
+                    if (!profiles.rlookup(DolbyVisionProfileDvavSe, &profileVal)) { // dvav.09
+                        ALOGE("Dolby Vision profile lookup error");
+                        return BAD_VALUE;
+                    }
+                    blCompatibilityId = 2;
+                } else {
+                    ALOGE("Dolby Vision profile look up error");
+                    return BAD_VALUE;
+                }
+
+                profile = (int32_t) profileVal;
+
+                uint8_t level_val = 0;
+                if (!levels.map(level, &level_val)) {
+                    ALOGE("Dolby Vision level lookup error");
+                    return BAD_VALUE;
+                }
+
+                std::vector<uint8_t> dvcc(dvccSize);
+
+                dvcc[0] = 1; // major version
+                dvcc[1] = 0; // minor version
+                dvcc[2] = (uint8_t)((profile & 0x7f) << 1); // dolby vision profile
+                dvcc[2] = (uint8_t)((dvcc[2] | (uint8_t)((level_val >> 5) & 0x1)) & 0xff);
+                dvcc[3] = (uint8_t)((level_val & 0x1f) << 3); // dolby vision level
+                dvcc[3] = (uint8_t)(dvcc[3] | (1 << 2)); // rpu_present_flag
+                dvcc[3] = (uint8_t)(dvcc[3] | (1)); // bl_present_flag
+                dvcc[4] = (uint8_t)(blCompatibilityId << 4); // bl_compatibility id
+
+                profiles.rlookup(DolbyVisionProfileDvav110, &profileVal);
+                profiles.rlookup(DolbyVisionProfileDvheDtb, &profileVal1);
+                if (profile > (int32_t) profileVal) {
+                    meta->setData(kKeyDVWC, kTypeDVWC, dvcc.data(), dvccSize);
+                } else if (profile > (int32_t) profileVal1) {
+                    meta->setData(kKeyDVVC, kTypeDVVC, dvcc.data(), dvccSize);
+                } else {
+                    meta->setData(kKeyDVCC, kTypeDVCC, dvcc.data(), dvccSize);
+                }
+
             } else {
-                ALOGE("We need csd-2!!. %s", msg->debugString().c_str());
-                return BAD_VALUE;
+                // we have csd-2, just use that to populate dvcc
+                if (csd2->size() == dvccSize) {
+                    uint8_t *dvcc = csd2->data();
+                    profile = dvcc[2] >> 1;
+
+                    profiles.rlookup(DolbyVisionProfileDvav110, &profileVal);
+                    profiles.rlookup(DolbyVisionProfileDvheDtb, &profileVal1);
+                    if (profile > (int32_t) profileVal) {
+                        meta->setData(kKeyDVWC, kTypeDVWC, csd2->data(), csd2->size());
+                    } else if (profile > (int32_t) profileVal1) {
+                        meta->setData(kKeyDVVC, kTypeDVVC, csd2->data(), csd2->size());
+                    } else {
+                         meta->setData(kKeyDVCC, kTypeDVCC, csd2->data(), csd2->size());
+                    }
+
+                } else {
+                    ALOGE("Convert MessageToMetadata csd-2 is present but not valid");
+                    return BAD_VALUE;
+                }
+            }
+            profiles.rlookup(DolbyVisionProfileDvavPen, &profileVal);
+            profiles.rlookup(DolbyVisionProfileDvavSe, &profileVal1);
+            profiles.rlookup(DolbyVisionProfileDvav110, &profileVal2);
+            if ((profile > (int32_t) profileVal) && (profile < (int32_t) profileVal1)) {
+                std::vector<uint8_t> hvcc(csd0size + 1024);
+                size_t outsize = reassembleHVCC(csd0, hvcc.data(), hvcc.size(), 4);
+                meta->setData(kKeyHVCC, kTypeHVCC, hvcc.data(), outsize);
+            } else if (profile == (int32_t) profileVal2) {
+                meta->setData(kKeyAV1C, 0, csd0->data(), csd0->size());
+            } else {
+                sp<ABuffer> csd1;
+                if (msg->findBuffer("csd-1", &csd1)) {
+                    std::vector<char> avcc(csd0size + csd1->size() + 1024);
+                    size_t outsize = reassembleAVCC(csd0, csd1, avcc.data());
+                    meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+                }
+                else {
+                    // for dolby vision avc, csd0 also holds csd1
+                    size_t i = 0;
+                    int csd0realsize = 0;
+                    do {
+                        i = findNextNalStartCode(csd0->data() + i,
+                                        csd0->size() - i) - csd0->data();
+                        if (i > 0) {
+                            csd0realsize = i;
+                            break;
+                        }
+                        i += 4;
+                    } while(i < csd0->size());
+                    // buffer0 -> csd0
+                    sp<ABuffer> buffer0 = new (std::nothrow) ABuffer(csd0realsize);
+                    if (buffer0.get() == NULL || buffer0->base() == NULL) {
+                        return NO_MEMORY;
+                    }
+                    memcpy(buffer0->data(), csd0->data(), csd0realsize);
+                    // buffer1 -> csd1
+                    sp<ABuffer> buffer1 = new (std::nothrow)
+                            ABuffer(csd0->size() - csd0realsize);
+                    if (buffer1.get() == NULL || buffer1->base() == NULL) {
+                        return NO_MEMORY;
+                    }
+                    memcpy(buffer1->data(), csd0->data()+csd0realsize,
+                                csd0->size() - csd0realsize);
+
+                    std::vector<char> avcc(csd0->size() + 1024);
+                    size_t outsize = reassembleAVCC(buffer0, buffer1, avcc.data());
+                    meta->setData(kKeyAVCC, kTypeAVCC, avcc.data(), outsize);
+                }
             }
         } else if (mime == MEDIA_MIMETYPE_VIDEO_VP9) {
             meta->setData(kKeyVp9CodecPrivate, 0, csd0->data(), csd0->size());
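For the AVC-based Dolby Vision path above, csd-0 may carry SPS and PPS back to back. Below is a hypothetical stand-in for findNextNalStartCode() (not the AOSP helper) showing how such a buffer gets split at the second Annex-B start code:

    #include <cstdint>
    #include <cstdio>

    // Returns the offset of the next 00 00 00 01 start code at or after
    // 'from', or 'size' if none is found.
    static size_t nextStartCode(const uint8_t *data, size_t size, size_t from) {
        for (size_t i = from; i + 4 <= size; ++i) {
            if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 && data[i + 3] == 1) {
                return i;
            }
        }
        return size;
    }

    int main() {
        // Hypothetical csd-0 carrying both SPS and PPS back to back (Annex-B).
        const uint8_t csd0[] = {0, 0, 0, 1, 0x67, 0x42, 0x00, 0x1e,   // SPS
                                0, 0, 0, 1, 0x68, 0xce, 0x38, 0x80};  // PPS
        const size_t size = sizeof(csd0);
        const size_t split = nextStartCode(csd0, size, 4);  // skip the first start code
        printf("csd-0 split at byte %zu of %zu\n", split, size);
        return 0;
    }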
@@ -2131,29 +2319,29 @@
 }
 
 struct aac_format_conv_t {
-    OMX_AUDIO_AACPROFILETYPE eAacProfileType;
+    int32_t eAacProfileType;
     audio_format_t format;
 };
 
 static const struct aac_format_conv_t profileLookup[] = {
-    { OMX_AUDIO_AACObjectMain,        AUDIO_FORMAT_AAC_MAIN},
-    { OMX_AUDIO_AACObjectLC,          AUDIO_FORMAT_AAC_LC},
-    { OMX_AUDIO_AACObjectSSR,         AUDIO_FORMAT_AAC_SSR},
-    { OMX_AUDIO_AACObjectLTP,         AUDIO_FORMAT_AAC_LTP},
-    { OMX_AUDIO_AACObjectHE,          AUDIO_FORMAT_AAC_HE_V1},
-    { OMX_AUDIO_AACObjectScalable,    AUDIO_FORMAT_AAC_SCALABLE},
-    { OMX_AUDIO_AACObjectERLC,        AUDIO_FORMAT_AAC_ERLC},
-    { OMX_AUDIO_AACObjectLD,          AUDIO_FORMAT_AAC_LD},
-    { OMX_AUDIO_AACObjectHE_PS,       AUDIO_FORMAT_AAC_HE_V2},
-    { OMX_AUDIO_AACObjectELD,         AUDIO_FORMAT_AAC_ELD},
-    { OMX_AUDIO_AACObjectXHE,         AUDIO_FORMAT_AAC_XHE},
-    { OMX_AUDIO_AACObjectNull,        AUDIO_FORMAT_AAC},
+    { AACObjectMain,        AUDIO_FORMAT_AAC_MAIN},
+    { AACObjectLC,          AUDIO_FORMAT_AAC_LC},
+    { AACObjectSSR,         AUDIO_FORMAT_AAC_SSR},
+    { AACObjectLTP,         AUDIO_FORMAT_AAC_LTP},
+    { AACObjectHE,          AUDIO_FORMAT_AAC_HE_V1},
+    { AACObjectScalable,    AUDIO_FORMAT_AAC_SCALABLE},
+    { AACObjectERLC,        AUDIO_FORMAT_AAC_ERLC},
+    { AACObjectLD,          AUDIO_FORMAT_AAC_LD},
+    { AACObjectHE_PS,       AUDIO_FORMAT_AAC_HE_V2},
+    { AACObjectELD,         AUDIO_FORMAT_AAC_ELD},
+    { AACObjectXHE,         AUDIO_FORMAT_AAC_XHE},
+    { AACObjectNull,        AUDIO_FORMAT_AAC},
 };
 
 void mapAACProfileToAudioFormat( audio_format_t& format, uint64_t eAacProfile)
 {
-const struct aac_format_conv_t* p = &profileLookup[0];
-    while (p->eAacProfileType != OMX_AUDIO_AACObjectNull) {
+    const struct aac_format_conv_t* p = &profileLookup[0];
+    while (p->eAacProfileType != AACObjectNull) {
         if (eAacProfile == p->eAacProfileType) {
             format = p->format;
             return;
@@ -2193,7 +2381,7 @@
     // Offloading depends on audio DSP capabilities.
     int32_t aacaot = -1;
     if (meta->findInt32(kKeyAACAOT, &aacaot)) {
-        mapAACProfileToAudioFormat(info->format,(OMX_AUDIO_AACPROFILETYPE) aacaot);
+        mapAACProfileToAudioFormat(info->format, aacaot);
     }
 
     int32_t srate = -1;
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index fb6c4e2..bb1cb0b 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -354,7 +354,7 @@
             }
 
             if (mpeg4type->eProfile != OMX_VIDEO_MPEG4ProfileCore ||
-                mpeg4type->eLevel != OMX_VIDEO_MPEG4Level2 ||
+                mpeg4type->eLevel > OMX_VIDEO_MPEG4Level2 ||
                 (mpeg4type->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) ||
                 mpeg4type->nBFrames != 0 ||
                 mpeg4type->nIDCVLCThreshold != 0 ||
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index c7dc415..6004cf8 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -23,6 +23,7 @@
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/ColorConverter.h>
+#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaErrors.h>
 
 #include "libyuv/convert_from.h"
@@ -51,13 +52,17 @@
 static bool isRGB(OMX_COLOR_FORMATTYPE colorFormat) {
     return colorFormat == OMX_COLOR_Format16bitRGB565
             || colorFormat == OMX_COLOR_Format32BitRGBA8888
-            || colorFormat == OMX_COLOR_Format32bitBGRA8888;
+            || colorFormat == OMX_COLOR_Format32bitBGRA8888
+            || colorFormat == COLOR_Format32bitABGR2101010;
 }
 
 bool ColorConverter::ColorSpace::isBt709() {
     return (mStandard == ColorUtils::kColorStandardBT709);
 }
 
+bool ColorConverter::ColorSpace::isBt2020() {
+    return (mStandard == ColorUtils::kColorStandardBT2020);
+}
 
 bool ColorConverter::ColorSpace::isJpeg() {
     return ((mStandard == ColorUtils::kColorStandardBT601_625)
@@ -70,16 +75,19 @@
     : mSrcFormat(from),
       mDstFormat(to),
       mSrcColorSpace({0, 0, 0}),
-      mClip(NULL) {
+      mClip(NULL),
+      mClip10Bit(NULL) {
 }
 
 ColorConverter::~ColorConverter() {
     delete[] mClip;
     mClip = NULL;
+    delete[] mClip10Bit;
+    mClip10Bit = NULL;
 }
 
 bool ColorConverter::isValid() const {
-    switch (mSrcFormat) {
+    switch ((int32_t)mSrcFormat) {
         case OMX_COLOR_FormatYUV420Planar16:
             if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
                 return true;
@@ -102,6 +110,8 @@
 #else
             return mDstFormat == OMX_COLOR_Format16bitRGB565;
 #endif
+        case COLOR_FormatYUVP010:
+            return mDstFormat == COLOR_Format32bitABGR2101010;
 
         default:
             return false;
@@ -143,9 +153,10 @@
       mCropTop(cropTop),
       mCropRight(cropRight),
       mCropBottom(cropBottom) {
-    switch(mColorFormat) {
+    switch((int32_t)mColorFormat) {
     case OMX_COLOR_Format16bitRGB565:
     case OMX_COLOR_FormatYUV420Planar16:
+    case COLOR_FormatYUVP010:
     case OMX_COLOR_FormatCbYCrY:
         mBpp = 2;
         mStride = 2 * mWidth;
@@ -153,6 +164,7 @@
 
     case OMX_COLOR_Format32bitBGRA8888:
     case OMX_COLOR_Format32BitRGBA8888:
+    case COLOR_Format32bitABGR2101010:
     case OMX_COLOR_FormatYUV444Y410:
         mBpp = 4;
         mStride = 4 * mWidth;
@@ -213,7 +225,7 @@
 
     status_t err;
 
-    switch (mSrcFormat) {
+    switch ((int32_t)mSrcFormat) {
         case OMX_COLOR_FormatYUV420Planar:
 #ifdef USE_LIBYUV
             err = convertYUV420PlanarUseLibYUV(src, dst);
@@ -235,6 +247,19 @@
             break;
         }
 
+        case COLOR_FormatYUVP010:
+        {
+#if PERF_PROFILING
+            int64_t startTimeUs = ALooper::GetNowUs();
+#endif
+            err = convertYUVP010(src, dst);
+#if PERF_PROFILING
+            int64_t endTimeUs = ALooper::GetNowUs();
+            ALOGD("convertYUVP010 took %lld us", (long long) (endTimeUs - startTimeUs));
+#endif
+            break;
+        }
+
         case OMX_COLOR_FormatCbYCrY:
             err = convertCbYCrY(src, dst);
             break;
@@ -439,23 +464,23 @@
 }
 
 std::function<void (void *, bool, signed, signed, signed, signed, signed, signed)>
-getWriteToDst(OMX_COLOR_FORMATTYPE dstFormat, uint8_t *kAdjustedClip) {
-    switch (dstFormat) {
+getWriteToDst(OMX_COLOR_FORMATTYPE dstFormat, void *kAdjustedClip) {
+    switch ((int)dstFormat) {
     case OMX_COLOR_Format16bitRGB565:
     {
         return [kAdjustedClip](void *dst_ptr, bool uncropped,
                                signed r1, signed g1, signed b1,
                                signed r2, signed g2, signed b2) {
             uint32_t rgb1 =
-                ((kAdjustedClip[r1] >> 3) << 11)
-                | ((kAdjustedClip[g1] >> 2) << 5)
-                | (kAdjustedClip[b1] >> 3);
+                ((((uint8_t *)kAdjustedClip)[r1] >> 3) << 11)
+                | ((((uint8_t *)kAdjustedClip)[g1] >> 2) << 5)
+                | (((uint8_t *)kAdjustedClip)[b1] >> 3);
 
             if (uncropped) {
                 uint32_t rgb2 =
-                    ((kAdjustedClip[r2] >> 3) << 11)
-                    | ((kAdjustedClip[g2] >> 2) << 5)
-                    | (kAdjustedClip[b2] >> 3);
+                    ((((uint8_t *)kAdjustedClip)[r2] >> 3) << 11)
+                    | ((((uint8_t *)kAdjustedClip)[g2] >> 2) << 5)
+                    | (((uint8_t *)kAdjustedClip)[b2] >> 3);
 
                 *(uint32_t *)dst_ptr = (rgb2 << 16) | rgb1;
             } else {
@@ -469,16 +494,16 @@
                                signed r1, signed g1, signed b1,
                                signed r2, signed g2, signed b2) {
             ((uint32_t *)dst_ptr)[0] =
-                    (kAdjustedClip[r1])
-                    | (kAdjustedClip[g1] << 8)
-                    | (kAdjustedClip[b1] << 16)
+                    (((uint8_t *)kAdjustedClip)[r1])
+                    | (((uint8_t *)kAdjustedClip)[g1] << 8)
+                    | (((uint8_t *)kAdjustedClip)[b1] << 16)
                     | (0xFF << 24);
 
             if (uncropped) {
                 ((uint32_t *)dst_ptr)[1] =
-                        (kAdjustedClip[r2])
-                        | (kAdjustedClip[g2] << 8)
-                        | (kAdjustedClip[b2] << 16)
+                        (((uint8_t *)kAdjustedClip)[r2])
+                        | (((uint8_t *)kAdjustedClip)[g2] << 8)
+                        | (((uint8_t *)kAdjustedClip)[b2] << 16)
                         | (0xFF << 24);
             }
         };
@@ -489,20 +514,41 @@
                                signed r1, signed g1, signed b1,
                                signed r2, signed g2, signed b2) {
             ((uint32_t *)dst_ptr)[0] =
-                    (kAdjustedClip[b1])
-                    | (kAdjustedClip[g1] << 8)
-                    | (kAdjustedClip[r1] << 16)
+                    (((uint8_t *)kAdjustedClip)[b1])
+                    | (((uint8_t *)kAdjustedClip)[g1] << 8)
+                    | (((uint8_t *)kAdjustedClip)[r1] << 16)
                     | (0xFF << 24);
 
             if (uncropped) {
                 ((uint32_t *)dst_ptr)[1] =
-                        (kAdjustedClip[b2])
-                        | (kAdjustedClip[g2] << 8)
-                        | (kAdjustedClip[r2] << 16)
+                        (((uint8_t *)kAdjustedClip)[b2])
+                        | (((uint8_t *)kAdjustedClip)[g2] << 8)
+                        | (((uint8_t *)kAdjustedClip)[r2] << 16)
                         | (0xFF << 24);
             }
         };
     }
+    case COLOR_Format32bitABGR2101010:
+    {
+        return [kAdjustedClip](void *dst_ptr, bool uncropped,
+                               signed r1, signed g1, signed b1,
+                               signed r2, signed g2, signed b2) {
+            ((uint32_t *)dst_ptr)[0] =
+                    (((uint16_t *)kAdjustedClip)[r1])
+                    | (((uint16_t *)kAdjustedClip)[g1] << 10)
+                    | (((uint16_t *)kAdjustedClip)[b1] << 20)
+                    | (3 << 30);
+
+            if (uncropped) {
+                ((uint32_t *)dst_ptr)[1] =
+                        (((uint16_t *)kAdjustedClip)[r2])
+                        | (((uint16_t *)kAdjustedClip)[g2] << 10)
+                        | (((uint16_t *)kAdjustedClip)[b2] << 20)
+                        | (3 << 30);
+            }
+        };
+    }
+
     default:
         TRESPASS();
     }
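A quick standalone illustration of the 10-bit packing used by the new COLOR_Format32bitABGR2101010 writer above; the channel values are hypothetical.

    #include <cstdint>
    #include <cstdio>

    int main() {
        // R in bits 0-9, G in 10-19, B in 20-29, constant alpha 3 in bits 30-31,
        // exactly as written by the lambda above.
        const uint32_t r = 512, g = 256, b = 1023;
        const uint32_t pixel = r | (g << 10) | (b << 20) | (3u << 30);
        printf("packed pixel = 0x%08X\n", pixel);
        return 0;
    }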
@@ -514,7 +560,7 @@
     uint8_t *kAdjustedClip = initClip();
 
     auto readFromSrc = getReadFromSrc(mSrcFormat);
-    auto writeToDst = getWriteToDst(mDstFormat, kAdjustedClip);
+    auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
 
     uint8_t *dst_ptr = (uint8_t *)dst.mBits
             + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
@@ -591,34 +637,116 @@
     return convertYUV420Planar(src, dst);
 }
 
-/*
- * Pack 10-bit YUV into RGBA_1010102.
- *
- * Media sends 10-bit YUV in a RGBA_1010102 format buffer. SF will handle
- * the conversion to RGB using RenderEngine fallback.
- *
- * We do not perform a YUV->RGB conversion here, however the conversion with
- * BT2020 to Full range is below for reference:
- *
- *   B = 1.168  *(Y - 64) + 2.148  *(U - 512)
- *   G = 1.168  *(Y - 64) - 0.652  *(V - 512) - 0.188  *(U - 512)
- *   R = 1.168  *(Y - 64) + 1.683  *(V - 512)
- *
- *   B = 1196/1024  *(Y - 64) + 2200/1024  *(U - 512)
- *   G = .................... -  668/1024  *(V - 512) - 192/1024  *(U - 512)
- *   R = .................... + 1723/1024  *(V - 512)
- *
- *   min_B = (1196  *(- 64) + 2200  *(- 512)) / 1024 = -1175
- *   min_G = (1196  *(- 64) - 668  *(1023 - 512) - 192  *(1023 - 512)) / 1024 = -504
- *   min_R = (1196  *(- 64) + 1723  *(- 512)) / 1024 = -937
- *
- *   max_B = (1196  *(1023 - 64) + 2200  *(1023 - 512)) / 1024 = 2218
- *   max_G = (1196  *(1023 - 64) - 668  *(- 512) - 192  *(- 512)) / 1024 = 1551
- *   max_R = (1196  *(1023 - 64) + 1723  *(1023 - 512)) / 1024 = 1980
- *
- *   clip range -1175 .. 2218
- *
- */
+status_t ColorConverter::convertYUVP010(
+        const BitmapParams &src, const BitmapParams &dst) {
+    if (mDstFormat == COLOR_Format32bitABGR2101010) {
+        return convertYUVP010ToRGBA1010102(src, dst);
+    }
+
+    return ERROR_UNSUPPORTED;
+}
+
+status_t ColorConverter::convertYUVP010ToRGBA1010102(
+        const BitmapParams &src, const BitmapParams &dst) {
+    uint16_t *kAdjustedClip10bit = initClip10Bit();
+
+//    auto readFromSrc = getReadFromSrc(mSrcFormat);
+    auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip10bit);
+
+    uint8_t *dst_ptr = (uint8_t *)dst.mBits
+            + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+    uint16_t *src_y = (uint16_t *)((uint8_t *)src.mBits
+            + src.mCropTop * src.mStride + src.mCropLeft * src.mBpp);
+
+    uint16_t *src_uv = (uint16_t *)((uint8_t *)src.mBits
+            + src.mStride * src.mHeight
+            + (src.mCropTop / 2) * src.mStride + src.mCropLeft * src.mBpp);
+
+    // BT.2020 Limited Range conversion
+
+    // B = 1.168  *(Y - 64) + 2.148  *(U - 512)
+    // G = 1.168  *(Y - 64) - 0.652  *(V - 512) - 0.188  *(U - 512)
+    // R = 1.168  *(Y - 64) + 1.683  *(V - 512)
+
+    // B = 1196/1024  *(Y - 64) + 2200/1024  *(U - 512)
+    // G = .................... -  668/1024  *(V - 512) - 192/1024  *(U - 512)
+    // R = .................... + 1723/1024  *(V - 512)
+
+    // min_B = (1196  *(- 64) + 2200  *(- 512)) / 1024 = -1175
+    // min_G = (1196  *(- 64) - 668  *(1023 - 512) - 192  *(1023 - 512)) / 1024 = -504
+    // min_R = (1196  *(- 64) + 1723  *(- 512)) / 1024 = -937
+
+    // max_B = (1196  *(1023 - 64) + 2200  *(1023 - 512)) / 1024 = 2218
+    // max_G = (1196  *(1023 - 64) - 668  *(- 512) - 192  *(- 512)) / 1024 = 1551
+    // max_R = (1196  *(1023 - 64) + 1723  *(1023 - 512)) / 1024 = 1980
+
+    // clip range -1175 .. 2218
+
+    // BT.709 Limited Range conversion
+
+    // B = 1.164 * (Y - 64) + 2.018 * (U - 512)
+    // G = 1.164 * (Y - 64) - 0.813 * (V - 512) - 0.391 * (U - 512)
+    // R = 1.164 * (Y - 64) + 1.596 * (V - 512)
+
+    // B = 1192/1024 * (Y - 64) + 2068/1024 * (U - 512)
+    // G = .................... -  832/1024 * (V - 512) - 400/1024 * (U - 512)
+    // R = .................... + 1636/1024 * (V - 512)
+
+    // min_B = (1192 * (- 64) + 2068 * (- 512)) / 1024 = -1108
+
+    // max_B = (1192 * (1023 - 64) + 517 * (1023 - 512)) / 1024 = 2148
+
+    // clip range -1108 .. 2148
+
+    signed mY = 1196, mU_B = 2200, mV_G = -668, mV_R = 1723, mU_G = -192;
+    if (!mSrcColorSpace.isBt2020()) {
+        mY = 1192;
+        mU_B = 2068;
+        mV_G = -832;
+        mV_R = 1636;
+        mU_G = -400;
+    }
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
+            signed y1, y2, u, v;
+            y1 = (src_y[x] >> 6) - 64;
+            y2 = (src_y[x + 1] >> 6) - 64;
+            u = int(src_uv[x] >> 6) - 512;
+            v = int(src_uv[x + 1] >> 6) - 512;
+
+            signed u_b = u * mU_B;
+            signed u_g = u * mU_G;
+            signed v_g = v * mV_G;
+            signed v_r = v * mV_R;
+
+            signed tmp1 = y1 * mY;
+            signed b1 = (tmp1 + u_b) / 1024;
+            signed g1 = (tmp1 + v_g + u_g) / 1024;
+            signed r1 = (tmp1 + v_r) / 1024;
+
+            signed tmp2 = y2 * mY;
+            signed b2 = (tmp2 + u_b) / 1024;
+            signed g2 = (tmp2 + v_g + u_g) / 1024;
+            signed r2 = (tmp2 + v_r) / 1024;
+
+            bool uncropped = x + 1 < src.cropWidth();
+
+            writeToDst(dst_ptr + x * dst.mBpp, uncropped, r1, g1, b1, r2, g2, b2);
+        }
+
+        src_y += src.mStride / 2;
+
+        if (y & 1) {
+            src_uv += src.mStride / 2;
+        }
+
+        dst_ptr += dst.mStride;
+    }
+
+    return OK;
+}
+
 
 #if !USE_NEON_Y410
 
@@ -1033,4 +1161,19 @@
     return &mClip[-kClipMin];
 }
 
+uint16_t *ColorConverter::initClip10Bit() {
+    static const signed kClipMin = -1176;
+    static const signed kClipMax = 2219;
+
+    if (mClip10Bit == NULL) {
+        mClip10Bit = new uint16_t[kClipMax - kClipMin + 1];
+
+        for (signed i = kClipMin; i <= kClipMax; ++i) {
+            mClip10Bit[i - kClipMin] = (i < 0) ? 0 : (i > 1023) ? 1023 : (uint16_t)i;
+        }
+    }
+
+    return &mClip10Bit[-kClipMin];
+}
+
 }  // namespace android
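To make the fixed-point math in convertYUVP010ToRGBA1010102() concrete, here is a worked single-pixel example using the BT.2020 limited-range coefficients quoted in the comments. The Y/U/V inputs are hypothetical 10-bit samples, i.e. already shifted down by 6 as the converter does.

    #include <cstdio>

    int main() {
        const int Y = 512, U = 600, V = 400;
        const int y = Y - 64, u = U - 512, v = V - 512;

        // BT.2020 limited-range coefficients, scaled by 1024 as in the patch.
        const int b = (1196 * y + 2200 * u) / 1024;
        const int g = (1196 * y - 668 * v - 192 * u) / 1024;
        const int r = (1196 * y + 1723 * v) / 1024;

        // Clamp to the 10-bit range, as initClip10Bit() does via its lookup table.
        auto clip = [](int x) { return x < 0 ? 0 : x > 1023 ? 1023 : x; };
        printf("R=%d G=%d B=%d\n", clip(r), clip(g), clip(b));
        return 0;
    }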
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index a4e3425..53ca4e7 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -91,11 +91,11 @@
         <MediaCodec name="c2.android.mpeg4.decoder" type="video/mp4v-es">
             <Alias name="OMX.google.mpeg4.decoder" />
             <!-- profiles and levels:  ProfileSimple : Level3 -->
-            <Limit name="size" min="2x2" max="352x288" />
+            <Limit name="size" min="2x2" max="1920x1920" />
             <Limit name="alignment" value="2x2" />
-            <Limit name="block-size" value="16x16" />
-            <Limit name="blocks-per-second" range="12-11880" />
-            <Limit name="bitrate" range="1-384000" />
+            <Limit name="block-count" range="1-14400" />
+            <Limit name="blocks-per-second" range="1-432000" />
+            <Limit name="bitrate" range="1-40000000" />
             <Feature name="adaptive-playback" />
         </MediaCodec>
         <MediaCodec name="c2.android.h263.decoder" type="video/3gpp">
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index c2114b3..5c99cc9 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -33,7 +33,7 @@
 
 #include <media/stagefright/foundation/hexdump.h>
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -659,7 +659,7 @@
     return s;
 }
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 // static
 sp<AMessage> AMessage::FromParcel(const Parcel &parcel, size_t maxNestingLevel) {
     int32_t what = parcel.readInt32();
@@ -825,7 +825,7 @@
         }
     }
 }
-#endif  // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#endif  // defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
 sp<AMessage> AMessage::changesFrom(const sp<const AMessage> &other, bool deep) const {
     if (other == NULL) {
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index b1ed077..a5e0ff8 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -27,7 +27,7 @@
 #include "ADebug.h"
 #include "AString.h"
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -365,7 +365,7 @@
     return !strcasecmp(mData + mSize - suffixLen, suffix);
 }
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 // static
 AString AString::FromParcel(const Parcel &parcel) {
     size_t size = static_cast<size_t>(parcel.readInt32());
@@ -380,7 +380,7 @@
     }
     return err;
 }
-#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#endif // defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
 AString AStringPrintf(const char *format, ...) {
     va_list ap;
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index dd2c66f..29360b1 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -120,8 +120,6 @@
         },
     },
 
-    clang: true,
-
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
@@ -165,7 +163,7 @@
 
     shared_libs: [
         "liblog",
-        "libutils",             // for sp<>
+        "libutils", // for sp<>
         // actually invokes this, but called from folks who already load it
         // "libmediandk",
     ],
@@ -193,8 +191,6 @@
         "ColorUtils_fill.cpp",
     ],
 
-    clang: true,
-
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
@@ -211,4 +207,3 @@
     ],
 
 }
-
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index fa722b5..6dc8157 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -590,9 +590,10 @@
     uint32_t gfxRange = range;
     uint32_t gfxStandard = standard;
     uint32_t gfxTransfer = transfer;
-    // TRICKY: use & to ensure all three mappings are completed
-    if (!(sGfxRanges.map(range, &gfxRange) & sGfxStandards.map(standard, &gfxStandard)
-            & sGfxTransfers.map(transfer, &gfxTransfer))) {
+    bool mappedRange = sGfxRanges.map(range, &gfxRange);
+    bool mappedStandard = sGfxStandards.map(standard, &gfxStandard);
+    bool mappedTransfer = sGfxTransfers.map(transfer, &gfxTransfer);
+    if (!(mappedRange && mappedStandard && mappedTransfer)) {
         ALOGW("could not safely map platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s) to "
               "graphics dataspace (R:%u S:%u T:%u)",
               range, asString(range), standard, asString(standard), transfer, asString(transfer),
@@ -626,9 +627,10 @@
     CU::ColorRange    cuRange    = CU::kColorRangeUnspecified;
     CU::ColorStandard cuStandard = CU::kColorStandardUnspecified;
     CU::ColorTransfer cuTransfer = CU::kColorTransferUnspecified;
-    // TRICKY: use & to ensure all three mappings are completed
-    if (!(sGfxRanges.map(gfxRange, &cuRange) & sGfxStandards.map(gfxStandard, &cuStandard)
-            & sGfxTransfers.map(gfxTransfer, &cuTransfer))) {
+    bool mappedRange = sGfxRanges.map(gfxRange, &cuRange);
+    bool mappedStandard = sGfxStandards.map(gfxStandard, &cuStandard);
+    bool mappedTransfer = sGfxTransfers.map(gfxTransfer, &cuTransfer);
+    if (!(mappedRange && mappedStandard && mappedTransfer)) {
         ALOGW("could not safely map graphics dataspace (R:%u S:%u T:%u) to "
               "platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s)",
               gfxRange, gfxStandard, gfxTransfer,
@@ -781,5 +783,14 @@
     return true;
 }
 
+// static
+bool ColorUtils::isHDRStaticInfoValid(HDRStaticInfo *info) {
+    if (info->sType1.mMaxDisplayLuminance > 0.0f
+        && info->sType1.mMinDisplayLuminance > 0.0f)  return true;
+    if (info->sType1.mMaxContentLightLevel > 0.0f
+        && info->sType1.mMaxFrameAverageLightLevel > 0.0f)  return true;
+    return false;
+}
+
 }  // namespace android
 
diff --git a/media/libstagefright/foundation/MetaData.cpp b/media/libstagefright/foundation/MetaData.cpp
index 7f48cfd..77913d5 100644
--- a/media/libstagefright/foundation/MetaData.cpp
+++ b/media/libstagefright/foundation/MetaData.cpp
@@ -28,7 +28,7 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MetaData.h>
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -48,7 +48,7 @@
 MetaData::~MetaData() {
 }
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 /* static */
 sp<MetaData> MetaData::createFromParcel(const Parcel &parcel) {
 
diff --git a/media/libstagefright/foundation/MetaDataBase.cpp b/media/libstagefright/foundation/MetaDataBase.cpp
index 3f050ea..980eb22 100644
--- a/media/libstagefright/foundation/MetaDataBase.cpp
+++ b/media/libstagefright/foundation/MetaDataBase.cpp
@@ -28,7 +28,7 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MetaDataBase.h>
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 #include <binder/Parcel.h>
 #endif
 
@@ -452,7 +452,7 @@
     }
 }
 
-#if !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 status_t MetaDataBase::writeToParcel(Parcel &parcel) {
     status_t ret;
     size_t numItems = mInternalData->mItems.size();
@@ -532,7 +532,7 @@
     ALOGW("no metadata in parcel");
     return UNKNOWN_ERROR;
 }
-#endif // !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
+#endif // defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
 
 }  // namespace android
 
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index a2b6c4f..72c8074 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -193,6 +193,9 @@
     static void setHDRStaticInfoIntoAMediaFormat(const HDRStaticInfo &info, AMediaFormat *format);
     // (internal) used by the setHDRStaticInfoInfo* routines
     static void fillHdrStaticInfoBuffer( const HDRStaticInfo &info, uint8_t *data);
+
+    // determine whether HDR static info is valid
+    static bool isHDRStaticInfoValid(HDRStaticInfo *info);
 };
 
 inline static const char *asString(android::ColorUtils::ColorStandard i, const char *def = "??") {
diff --git a/media/libstagefright/httplive/Android.bp b/media/libstagefright/httplive/Android.bp
index 0b0acbf..7acf735 100644
--- a/media/libstagefright/httplive/Android.bp
+++ b/media/libstagefright/httplive/Android.bp
@@ -29,7 +29,6 @@
     ],
 
     include_dirs: [
-        "frameworks/av/media/libstagefright",
         "frameworks/native/include/media/openmax",
     ],
 
@@ -65,6 +64,8 @@
 
     header_libs: [
         "libbase_headers",
+        "libstagefright_headers",
+        "libstagefright_httplive_headers",
     ],
 
     static_libs: [
@@ -74,3 +75,8 @@
     ],
 
 }
+
+cc_library_headers {
+    name: "libstagefright_httplive_headers",
+    export_include_dirs: ["."],
+}
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 3bad015..0d7cadd 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -23,7 +23,7 @@
 #include "M3UParser.h"
 #include "PlaylistFetcher.h"
 
-#include "mpeg2ts/AnotherPacketSource.h"
+#include <AnotherPacketSource.h>
 
 #include <cutils/properties.h>
 #include <media/MediaHTTPService.h>
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 7a6d487..ceea41d 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -24,7 +24,7 @@
 
 #include <utils/String8.h>
 
-#include "mpeg2ts/ATSParser.h"
+#include <ATSParser.h>
 
 namespace android {
 
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index b23aa8a..907b326 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -24,9 +24,9 @@
 #include "HTTPDownloader.h"
 #include "LiveSession.h"
 #include "M3UParser.h"
-#include "include/ID3.h"
-#include "mpeg2ts/AnotherPacketSource.h"
-#include "mpeg2ts/HlsSampleDecryptor.h"
+#include <ID3.h>
+#include <AnotherPacketSource.h>
+#include <HlsSampleDecryptor.h>
 
 #include <datasource/DataURISource.h>
 #include <media/stagefright/foundation/ABitReader.h>
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index 5d3f9c1..2e28164 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -21,7 +21,7 @@
 #include <media/stagefright/foundation/AHandler.h>
 #include <openssl/aes.h>
 
-#include "mpeg2ts/ATSParser.h"
+#include <ATSParser.h>
 #include "LiveSession.h"
 
 namespace android {
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
new file mode 100644
index 0000000..85fd8b7
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_media_libstagefright_httplive_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: [
+        "frameworks_av_media_libstagefright_httplive_license",
+    ],
+}
+
+cc_fuzz {
+    name: "httplive_fuzzer",
+    srcs: [
+        "httplive_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libstagefright_httplive",
+        "libstagefright_id3",
+        "libstagefright_metadatautils",
+        "libstagefright_mpeg2support",
+        "liblog",
+        "libcutils",
+        "libdatasource",
+        "libmedia",
+        "libstagefright",
+    ],
+    header_libs: [
+        "libbase_headers",
+        "libstagefright_foundation_headers",
+        "libstagefright_headers",
+        "libstagefright_httplive_headers",
+    ],
+    shared_libs: [
+        "libcrypto",
+        "libstagefright_foundation",
+        "libhidlbase",
+        "libhidlmemory",
+        "libutils",
+        "android.hidl.allocator@1.0",
+    ],
+    corpus: ["corpus/*"],
+    dictionary: "httplive_fuzzer.dict",
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/media/libstagefright/httplive/fuzzer/README.md b/media/libstagefright/httplive/fuzzer/README.md
new file mode 100644
index 0000000..3a64ea4
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libstagefright_httplive
+
+## Plugin Design Considerations
+The fuzzer plugin for libstagefright_httplive is designed based on an understanding of the library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. Also, several .m3u8 files are hand-crafted and added to the corpus directory to increase code coverage. This ensures more code paths are reached by the fuzzer.
+
+libstagefright_httplive supports the following parameters:
+1. Final Result (parameter name: `finalResult`)
+2. Flags (parameter name: `flags`)
+3. Time Us (parameter name: `timeUs`)
+4. Track Index (parameter name: `trackIndex`)
+5. Index (parameter name: `index`)
+6. Select (parameter name: `select`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `finalResult` | `-34` to `-1` | Value obtained from FuzzedDataProvider|
+| `flags` | `0` to `1` | Value obtained from FuzzedDataProvider|
+| `timeUs` | `0` to `10000000` | Value obtained from FuzzedDataProvider|
+| `trackIndex` | `UINT32_MIN` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `index` | `UINT32_MIN` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `select` | `True` or `False` | Value obtained from FuzzedDataProvider|
+
+This also ensures that the plugin is always deterministic for any given input.
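+
+As a rough sketch (not the plugin's actual code), parameters in the table above
+could be drawn from the fuzz input with `FuzzedDataProvider` roughly like this;
+the ranges mirror the table and the calls used are the standard
+FuzzedDataProvider API:
+```
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include <cstddef>
+#include <cstdint>
+
+// Illustration only: derive the tabled parameters from the raw input bytes.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  FuzzedDataProvider fdp(data, size);
+  int32_t finalResult = fdp.ConsumeIntegralInRange<int32_t>(-34, -1);
+  uint32_t flags = fdp.ConsumeIntegralInRange<uint32_t>(0, 1);
+  int64_t timeUs = fdp.ConsumeIntegralInRange<int64_t>(0, 10000000);
+  uint32_t trackIndex = fdp.ConsumeIntegral<uint32_t>();
+  uint32_t index = fdp.ConsumeIntegral<uint32_t>();
+  bool select = fdp.ConsumeBool();
+  // The real plugin forwards such values to LiveSession/LiveDataSource calls.
+  (void)finalResult; (void)flags; (void)timeUs;
+  (void)trackIndex; (void)index; (void)select;
+  return 0;
+}
+```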
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the httplive module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
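+
+A minimal sketch of that idea, under the assumption that the whole blob is
+persisted as a playlist file before being handed to the HLS stack (the path
+argument below is a placeholder, not the plugin's real one; the actual flow is
+in httplive_fuzzer.cpp):
+```
+#include <cstddef>
+#include <cstdint>
+#include <fstream>
+#include <string>
+
+// Sketch only: write every input byte out as an .m3u8 playlist so the
+// parser consumes the full blob, however large or malformed it is.
+void writeWholeInputAsPlaylist(const uint8_t *data, size_t size,
+                               const std::string &path) {
+  std::ofstream out(path, std::ios::out | std::ios::binary);
+  out.write(reinterpret_cast<const char *>(data),
+            static_cast<std::streamsize>(size));
+}
+```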
+
+## Build
+
+This describes steps to build the httplive_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) httplive_fuzzer
+```
+#### Steps to run
+To run on device
+```
+  $ adb push $ANDROID_PRODUCT_OUT/data/fuzz/${TARGET_ARCH}/lib /data/fuzz/${TARGET_ARCH}/lib
+  $ adb push $ANDROID_PRODUCT_OUT/data/fuzz/${TARGET_ARCH}/httplive_fuzzer /data/fuzz/${TARGET_ARCH}/httplive_fuzzer
+  $ adb shell /data/fuzz/${TARGET_ARCH}/httplive_fuzzer/httplive_fuzzer /data/fuzz/${TARGET_ARCH}/httplive_fuzzer/corpus
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libstagefright/httplive/fuzzer/corpus/crypt.key b/media/libstagefright/httplive/fuzzer/corpus/crypt.key
new file mode 100644
index 0000000..f9d5d7f
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/crypt.key
@@ -0,0 +1,2 @@

+ÏŒüÐ5Љ_xïHÎ3
diff --git a/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8
new file mode 100644
index 0000000..32b0eac
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/encrypted.m3u8
@@ -0,0 +1,12 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-ALLOW-CACHE:YES
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXT-X-VERSION:3
+#EXT-X-MEDIA-SEQUENCE:1
+#EXT-X-KEY:METHOD=AES-128,URI="../../fuzz/arm64/httplive_fuzzer/corpus/crypt.key"
+#EXTINF:10.000,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5.092,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8
new file mode 100644
index 0000000..9338e04
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/hls.m3u8
@@ -0,0 +1,8 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:10
+#EXT-X-MEDIA-SEQUENCE:0
+#EXTINF:10, no desc
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10, no desc
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8
new file mode 100644
index 0000000..e1eff58
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index1.m3u8
@@ -0,0 +1,14 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence0.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8
new file mode 100644
index 0000000..37a0189
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index2.m3u8
@@ -0,0 +1,6 @@
+#EXTM3U
+#EXT-X-INDEPENDENT-SEGMENTS
+#EXT-X-STREAM-INF:CLOSED-CAPTIONS=NONE,BANDWIDTH=165340,RESOLUTION=256x144,CODECS="mp4a.40.5,avc1.42c00b"
+https://non.existentsite.com/test-doesnt-dereference-these-paths/prog_index.m3u8
+#EXT-X-STREAM-INF:CLOSED-CAPTIONS=NONE,BANDWIDTH=344388,RESOLUTION=426x240,CODECS="mp4a.40.5,avc1.4d4015"
+https://non.existentsite.com/test-doesnt-dereference-these-paths/prog_index1.m3u8
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8
new file mode 100644
index 0000000..1b7f489
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index3.m3u8
@@ -0,0 +1,13 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=AES-128,URI="https://demo.unified-streaming.com/video/tears-of-steel/aes.key",IV=0X99b74007b6254e4bd1c6e03631cad15b
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence3.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8
new file mode 100644
index 0000000..89ba37c
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index4.m3u8
@@ -0,0 +1,15 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=SAMPLE-AES,URI="data:text/plain;charset=utf-8,a4cd9995a1aa91e1",IV=0X99b74007b6254e4bd1c6e03631cad15b
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence0.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8
new file mode 100644
index 0000000..2120de4
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index5.m3u8
@@ -0,0 +1,14 @@
+#EXTM3U
+#EXT-X-TARGETDURATION:11
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-MEDIA-SEQUENCE:0
+#EXT-X-VERSION:4
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:10@0
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:20@10
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:10.0,
+#EXT-X-BYTERANGE:80
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8
new file mode 100644
index 0000000..588368a
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index6.m3u8
@@ -0,0 +1,12 @@
+#EXTM3U
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=AES-128,URI="data:text/plain;charset=utf-8,a4cd9995a1aa91e1",IV=0x30303030303030303030303030303030
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence2.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence3.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8
new file mode 100644
index 0000000..b09948e
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index7.m3u8
@@ -0,0 +1,46 @@
+#EXTM3U
+#EXT-X-VERSION:4
+## Created with Unified Streaming Platform  (version=1.11.3-24438)
+#EXT-X-SESSION-KEY:METHOD=AES-128,URI="https://demo.unified-streaming.com/video/tears-of-steel/aes.key"
+
+# AUDIO groups
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio-aacl-64",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,CHANNELS="2"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio-aacl-128",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,CHANNELS="2"
+
+# SUBTITLES groups
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="textstream",LANGUAGE="en",NAME="English",DEFAULT=YES,AUTOSELECT=YES,URI="tears-of-steel-aes-textstream_eng=1000.m3u8"
+#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID="textstream",LANGUAGE="ru",NAME="Russian",AUTOSELECT=YES,URI="tears-of-steel-aes-textstream_rus=1000.m3u8"
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=494000,CODECS="mp4a.40.2,avc1.42C00D",RESOLUTION=224x100,FRAME-RATE=24,AUDIO="audio-aacl-64",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=64008-video_eng=401000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=933000,CODECS="mp4a.40.2,avc1.42C016",RESOLUTION=448x200,FRAME-RATE=24,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=751000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1198000,CODECS="mp4a.40.2,avc1.4D401F",RESOLUTION=784x350,FRAME-RATE=24,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=1001000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1728000,CODECS="mp4a.40.2,avc1.640028",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=1501000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=2469000,CODECS="mp4a.40.2,avc1.640028",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng=2200000.m3u8
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=1025000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=1680x750,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-64",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=64008-video_eng_1=902000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1368000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=2576x1150,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng_1=1161000.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1815000,CODECS="mp4a.40.2,hvc1.1.6.L150.90",RESOLUTION=3360x1500,FRAME-RATE=24,VIDEO-RANGE=SDR,AUDIO="audio-aacl-128",SUBTITLES="textstream",CLOSED-CAPTIONS=NONE
+tears-of-steel-aes-audio_eng=128002-video_eng_1=1583000.m3u8
+
+# variants
+#EXT-X-STREAM-INF:BANDWIDTH=69000,CODECS="mp4a.40.2",AUDIO="audio-aacl-64",SUBTITLES="textstream"
+tears-of-steel-aes-audio_eng=64008.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=137000,CODECS="mp4a.40.2",AUDIO="audio-aacl-128",SUBTITLES="textstream"
+tears-of-steel-aes-audio_eng=128002.m3u8
+
+# keyframes
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=54000,CODECS="avc1.42C00D",RESOLUTION=224x100,URI="keyframes/tears-of-steel-aes-video_eng=401000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=100000,CODECS="avc1.42C016",RESOLUTION=448x200,URI="keyframes/tears-of-steel-aes-video_eng=751000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=133000,CODECS="avc1.4D401F",RESOLUTION=784x350,URI="keyframes/tears-of-steel-aes-video_eng=1001000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=120000,CODECS="hvc1.1.6.L150.90",RESOLUTION=1680x750,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=902000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=154000,CODECS="hvc1.1.6.L150.90",RESOLUTION=2576x1150,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=1161000.m3u8"
+#EXT-X-I-FRAME-STREAM-INF:BANDWIDTH=210000,CODECS="hvc1.1.6.L150.90",RESOLUTION=3360x1500,VIDEO-RANGE=SDR,URI="keyframes/tears-of-steel-aes-video_eng_1=1583000.m3u8"
diff --git a/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8
new file mode 100644
index 0000000..353d589
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/index8.m3u8
@@ -0,0 +1,13 @@
+#EXTM3U
+#EXT-X-VERSION:5
+
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="English stereo",LANGUAGE="en",AUTOSELECT=YES,URI="../../fuzz/arm64/httplive_fuzzer/index1.m3u8"
+
+#EXT-X-STREAM-INF:BANDWIDTH=628000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=320x180,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index1.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=928000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=480x270,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index2.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=1728000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=640x360,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index3.m3u8
+#EXT-X-STREAM-INF:BANDWIDTH=2528000,CODECS="avc1.42c00d,mp4a.40.2",RESOLUTION=960x540,AUDIO="audio"
+../../fuzz/arm64/httplive_fuzzer/index1.m3u8
diff --git a/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8 b/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8
new file mode 100644
index 0000000..eb88422
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/corpus/prog_index.m3u8
@@ -0,0 +1,17 @@
+#EXTM3U
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="eng",NAME="English",AUTOSELECT=YES,DEFAULT=YES,URI="corpus/index1.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="fre",NAME="Français",AUTOSELECT=YES,DEFAULT=NO,URI="corpus/index1.m3u8"
+#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",LANGUAGE="sp",NAME="Espanol",AUTOSELECT=YES,DEFAULT=NO,URI="corpus/index1.m3u8"
+#EXT-X-VERSION:4
+#EXT-X-TARGETDURATION:5
+#EXT-X-KEY:METHOD=NONE
+#EXT-X-DISCONTINUITY-SEQUENCE:0
+#EXT-X-PLAYLIST-TYPE:VOD
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-DISCONTINUITY
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXTINF:5,
+https://non.existentsite.com/test-doesnt-dereference-these-paths/fileSequence1.ts
+#EXT-X-ENDLIST
diff --git a/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp
new file mode 100644
index 0000000..aa777b3
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.cpp
@@ -0,0 +1,298 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <LiveDataSource.h>
+#include <LiveSession.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/mediaplayer_common.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/ALooperRoster.h>
+#include <string>
+#include <utils/Log.h>
+
+using namespace std;
+using namespace android;
+
+constexpr char kFileNamePrefix[] = "/data/local/tmp/httplive-";
+constexpr char kFileNameSuffix[] = ".m3u8";
+constexpr char kFileUrlPrefix[] = "file://";
+constexpr int64_t kOffSet = 0;
+constexpr int32_t kReadyMarkMs = 5000;
+constexpr int32_t kPrepareMarkMs = 1500;
+constexpr int32_t kErrorNoMax = -1;
+constexpr int32_t kErrorNoMin = -34;
+constexpr int32_t kMaxTimeUs = 1000;
+constexpr int32_t kRandomStringLength = 64;
+constexpr int32_t kRangeMin = 0;
+constexpr int32_t kRangeMax = 1000;
+
+constexpr LiveSession::StreamType kValidStreamType[] = {
+    LiveSession::STREAMTYPE_AUDIO, LiveSession::STREAMTYPE_VIDEO,
+    LiveSession::STREAMTYPE_SUBTITLES, LiveSession::STREAMTYPE_METADATA};
+
+constexpr MediaSource::ReadOptions::SeekMode kValidSeekMode[] = {
+    MediaSource::ReadOptions::SeekMode::SEEK_PREVIOUS_SYNC,
+    MediaSource::ReadOptions::SeekMode::SEEK_NEXT_SYNC,
+    MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST_SYNC,
+    MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST,
+    MediaSource::ReadOptions::SeekMode::SEEK_FRAME_INDEX};
+
+constexpr media_track_type kValidMediaTrackType[] = {
+    MEDIA_TRACK_TYPE_UNKNOWN,  MEDIA_TRACK_TYPE_VIDEO,
+    MEDIA_TRACK_TYPE_AUDIO,    MEDIA_TRACK_TYPE_TIMEDTEXT,
+    MEDIA_TRACK_TYPE_SUBTITLE, MEDIA_TRACK_TYPE_METADATA};
+
+struct TestAHandler : public AHandler {
+public:
+  TestAHandler(std::function<void()> signalEosFunction)
+      : mSignalEosFunction(signalEosFunction) {}
+  virtual ~TestAHandler() {}
+
+protected:
+  void onMessageReceived(const sp<AMessage> &msg) override {
+    int32_t what = -1;
+    msg->findInt32("what", &what);
+    switch (what) {
+    case LiveSession::kWhatError:
+    case LiveSession::kWhatPrepared:
+    case LiveSession::kWhatPreparationFailed: {
+      mSignalEosFunction();
+      break;
+    }
+    }
+    return;
+  }
+
+private:
+  std::function<void()> mSignalEosFunction;
+};
+
+struct TestMediaHTTPConnection : public MediaHTTPConnection {
+public:
+  TestMediaHTTPConnection() {}
+  virtual ~TestMediaHTTPConnection() {}
+
+  virtual bool connect(const char * /*uri*/,
+                       const KeyedVector<String8, String8> * /*headers*/) {
+    return true;
+  }
+
+  virtual void disconnect() { return; }
+
+  virtual ssize_t readAt(off64_t /*offset*/, void * /*data*/, size_t size) {
+    return size;
+  }
+
+  virtual off64_t getSize() { return 0; }
+  virtual status_t getMIMEType(String8 * /*mimeType*/) { return NO_ERROR; }
+  virtual status_t getUri(String8 * /*uri*/) { return NO_ERROR; }
+
+private:
+  DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPConnection);
+};
+
+struct TestMediaHTTPService : public MediaHTTPService {
+public:
+  TestMediaHTTPService() {}
+  ~TestMediaHTTPService() {}
+
+  virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+    mediaHTTPConnection = sp<TestMediaHTTPConnection>::make();
+    return mediaHTTPConnection;
+  }
+
+private:
+  sp<TestMediaHTTPConnection> mediaHTTPConnection = nullptr;
+  DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
+};
+
+class HttpLiveFuzzer {
+public:
+  void process(const uint8_t *data, size_t size);
+  void deInitLiveSession();
+  ~HttpLiveFuzzer() { deInitLiveSession(); }
+
+private:
+  void invokeLiveDataSource();
+  void createM3U8File(const uint8_t *data, size_t size);
+  void initLiveDataSource();
+  void invokeLiveSession();
+  void initLiveSession();
+  void invokeDequeueAccessUnit();
+  void invokeConnectAsync();
+  void invokeSeekTo();
+  void invokeGetConfig();
+  void signalEos();
+  string generateFileName();
+  sp<LiveDataSource> mLiveDataSource = nullptr;
+  sp<LiveSession> mLiveSession = nullptr;
+  sp<ALooper> mLiveLooper = nullptr;
+  sp<TestMediaHTTPService> httpService = nullptr;
+  sp<TestAHandler> mHandler = nullptr;
+  FuzzedDataProvider *mFDP = nullptr;
+  bool mEosReached = false;
+  std::mutex mDownloadCompleteMutex;
+  std::condition_variable mConditionalVariable;
+};
+
+string HttpLiveFuzzer::generateFileName() {
+  return kFileNamePrefix + to_string(getpid()) + kFileNameSuffix;
+}
+
+void HttpLiveFuzzer::createM3U8File(const uint8_t *data, size_t size) {
+  ofstream m3u8File;
+  string currentFileName = generateFileName();
+  m3u8File.open(currentFileName, ios::out | ios::binary);
+  m3u8File.write((char *)data, size);
+  m3u8File.close();
+}
+
+void HttpLiveFuzzer::initLiveDataSource() {
+  mLiveDataSource = sp<LiveDataSource>::make();
+}
+
+void HttpLiveFuzzer::invokeLiveDataSource() {
+  initLiveDataSource();
+  size_t size = mFDP->ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
+  sp<ABuffer> buffer = new ABuffer(size);
+  mLiveDataSource->queueBuffer(buffer);
+  uint8_t *data = new uint8_t[size];
+  mLiveDataSource->readAtNonBlocking(kOffSet, data, size);
+  int32_t finalResult = mFDP->ConsumeIntegralInRange(kErrorNoMin, kErrorNoMax);
+  mLiveDataSource->queueEOS(finalResult);
+  mLiveDataSource->reset();
+  mLiveDataSource->countQueuedBuffers();
+  mLiveDataSource->initCheck();
+  delete[] data;
+}
+
+void HttpLiveFuzzer::initLiveSession() {
+  ALooperRoster looperRoster;
+  mHandler =
+      sp<TestAHandler>::make(std::bind(&HttpLiveFuzzer::signalEos, this));
+  mLiveLooper = sp<ALooper>::make();
+  mLiveLooper->setName("http live");
+  mLiveLooper->start();
+  sp<AMessage> notify = sp<AMessage>::make(0, mHandler);
+  httpService = new TestMediaHTTPService();
+  uint32_t flags = mFDP->ConsumeIntegral<uint32_t>();
+  mLiveSession = sp<LiveSession>::make(notify, flags, httpService);
+  mLiveLooper->registerHandler(mLiveSession);
+  looperRoster.registerHandler(mLiveLooper, mHandler);
+}
+
+void HttpLiveFuzzer::invokeDequeueAccessUnit() {
+  LiveSession::StreamType stream = mFDP->PickValueInArray(kValidStreamType);
+  sp<ABuffer> buffer;
+  mLiveSession->dequeueAccessUnit(stream, &buffer);
+}
+
+void HttpLiveFuzzer::invokeSeekTo() {
+  int64_t timeUs = mFDP->ConsumeIntegralInRange<int64_t>(0, kMaxTimeUs);
+  MediaSource::ReadOptions::SeekMode mode =
+      mFDP->PickValueInArray(kValidSeekMode);
+  mLiveSession->seekTo(timeUs, mode);
+}
+
+void HttpLiveFuzzer::invokeGetConfig() {
+  mLiveSession->getTrackCount();
+  size_t trackIndex = mFDP->ConsumeIntegral<size_t>();
+  mLiveSession->getTrackInfo(trackIndex);
+  media_track_type type = mFDP->PickValueInArray(kValidMediaTrackType);
+  mLiveSession->getSelectedTrack(type);
+  sp<MetaData> meta;
+  LiveSession::StreamType stream = mFDP->PickValueInArray(kValidStreamType);
+  mLiveSession->getStreamFormatMeta(stream, &meta);
+  mLiveSession->getKeyForStream(stream);
+  if (stream != LiveSession::STREAMTYPE_SUBTITLES) {
+    mLiveSession->getSourceTypeForStream(stream);
+  }
+}
+
+void HttpLiveFuzzer::invokeConnectAsync() {
+  string currentFileName = generateFileName();
+  string url = kFileUrlPrefix + currentFileName;
+  string str_1 = mFDP->ConsumeRandomLengthString(kRandomStringLength);
+  string str_2 = mFDP->ConsumeRandomLengthString(kRandomStringLength);
+
+  KeyedVector<String8, String8> headers;
+  headers.add(String8(str_1.c_str()), String8(str_2.c_str()));
+  mLiveSession->connectAsync(url.c_str(), &headers);
+}
+
+void HttpLiveFuzzer::invokeLiveSession() {
+  initLiveSession();
+  BufferingSettings bufferingSettings;
+  bufferingSettings.mInitialMarkMs = kPrepareMarkMs;
+  bufferingSettings.mResumePlaybackMarkMs = kReadyMarkMs;
+  mLiveSession->setBufferingSettings(bufferingSettings);
+  invokeConnectAsync();
+  std::unique_lock waitForDownloadComplete(mDownloadCompleteMutex);
+  mConditionalVariable.wait(waitForDownloadComplete,
+                            [this] { return mEosReached; });
+  if (mLiveSession->isSeekable()) {
+    invokeSeekTo();
+  }
+  invokeDequeueAccessUnit();
+  size_t index = mFDP->ConsumeIntegral<size_t>();
+  bool select = mFDP->ConsumeBool();
+  mLiveSession->selectTrack(index, select);
+  mLiveSession->hasDynamicDuration();
+  int64_t firstTimeUs =
+      mFDP->ConsumeIntegralInRange<int64_t>(kRangeMin, kRangeMax);
+  int64_t timeUs = mFDP->ConsumeIntegralInRange<int64_t>(kRangeMin, kRangeMax);
+  int32_t discontinuitySeq = mFDP->ConsumeIntegral<int32_t>();
+  mLiveSession->calculateMediaTimeUs(firstTimeUs, timeUs, discontinuitySeq);
+  invokeGetConfig();
+}
+
+void HttpLiveFuzzer::process(const uint8_t *data, size_t size) {
+  mFDP = new FuzzedDataProvider(data, size);
+  createM3U8File(data, size);
+  invokeLiveDataSource();
+  invokeLiveSession();
+  delete mFDP;
+}
+
+void HttpLiveFuzzer::deInitLiveSession() {
+  if (mLiveSession != nullptr) {
+    mLiveSession->disconnect();
+    mLiveLooper->unregisterHandler(mLiveSession->id());
+    mLiveLooper->stop();
+  }
+  mLiveSession.clear();
+  mLiveLooper.clear();
+}
+
+void HttpLiveFuzzer::signalEos() {
+  {
+    // Set the flag while holding the lock so the waiting thread cannot miss
+    // the update when it re-checks the wait predicate.
+    std::lock_guard<std::mutex> waitForDownloadComplete(mDownloadCompleteMutex);
+    mEosReached = true;
+  }
+  mConditionalVariable.notify_one();
+  return;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  HttpLiveFuzzer httpliveFuzzer;
+  httpliveFuzzer.process(data, size);
+  return 0;
+}
diff --git a/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict
new file mode 100644
index 0000000..703cc7e
--- /dev/null
+++ b/media/libstagefright/httplive/fuzzer/httplive_fuzzer.dict
@@ -0,0 +1,15 @@
+#m3u8-Tags
+kw1="#EXTM3U"
+kw2="#EXT-X-VERSION:"
+kw3="#EXT-X-TARGETDURATION:"
+kw4="#EXT-X-PLAYLIST-TYPE:"
+kw5="#EXTINF:"
+kw6="#EXT-X-ENDLIST"
+kw7="#EXT-X-MEDIA-SEQUENCE:"
+kw8="#EXT-X-KEY:METHOD=NONE"
+kw9="#EXT-X-DISCONTINUITY:"
+kw10="#EXT-X-DISCONTINUITY-SEQUENCE:0"
+kw11="#EXT-X-STREAM-INF:BANDWIDTH="
+kw12="#EXT-X-STREAM-INF:CODECS="
+kw13="#EXT-X-BYTERANGE:"
+kw14="#EXT-X-MEDIA"
diff --git a/media/libstagefright/id3/TEST_MAPPING b/media/libstagefright/id3/TEST_MAPPING
index d82d26e..6106908 100644
--- a/media/libstagefright/id3/TEST_MAPPING
+++ b/media/libstagefright/id3/TEST_MAPPING
@@ -9,14 +9,15 @@
 
   "presubmit-large": [
     // this doesn't seem to run any tests.
-    // but: cts-tradefed run -m CtsMediaTestCases -t android.media.cts.MediaMetadataRetrieverTest
+    // but: cts-tradefed run -m CtsMediaMiscTestCases -t \
+    // android.media.misc.cts.MediaMetadataRetrieverTest
     // does run he 32 and 64 bit tests, but not the instant tests
     // but all I know is that with 'atest', it's not running
     {
-      "name": "CtsMediaTestCases",
+      "name": "CtsMediaMiscTestCases",
       "options": [
           {
-            "include-filter": "android.media.cts.MediaMetadataRetrieverTest"
+            "include-filter": "android.media.misc.cts.MediaMetadataRetrieverTest"
           }
       ]
     }
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index c84cc10..632b32c 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -71,6 +71,9 @@
     virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
     virtual void initiateStart();
     virtual void initiateShutdown(bool keepComponentAllocated = false);
+    virtual status_t querySupportedParameters(std::vector<std::string> *names) override;
+    virtual status_t subscribeToParameters(const std::vector<std::string> &names) override;
+    virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names) override;
 
     status_t queryCapabilities(
             const char* owner, const char* name,
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 75b0d8e..1d86a22 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -54,6 +54,7 @@
         uint32_t mTransfer;
 
         bool isBt709();
+        bool isBt2020();
         bool isJpeg();
     };
 
@@ -78,8 +79,10 @@
     OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
     ColorSpace mSrcColorSpace;
     uint8_t *mClip;
+    uint16_t *mClip10Bit;
 
     uint8_t *initClip();
+    uint16_t *initClip10Bit();
 
     status_t convertCbYCrY(
             const BitmapParams &src, const BitmapParams &dst);
@@ -111,6 +114,12 @@
     status_t convertTIYUV420PackedSemiPlanar(
             const BitmapParams &src, const BitmapParams &dst);
 
+    status_t convertYUVP010(
+                const BitmapParams &src, const BitmapParams &dst);
+
+    status_t convertYUVP010ToRGBA1010102(
+                const BitmapParams &src, const BitmapParams &dst);
+
     ColorConverter(const ColorConverter &);
     ColorConverter &operator=(const ColorConverter &);
 };
diff --git a/media/libstagefright/include/media/stagefright/MediaBuffer.h b/media/libstagefright/include/media/stagefright/MediaBuffer.h
index 2c03f27..f070aac 100644
--- a/media/libstagefright/include/media/stagefright/MediaBuffer.h
+++ b/media/libstagefright/include/media/stagefright/MediaBuffer.h
@@ -105,7 +105,6 @@
         if (mMemory.get() == nullptr || mMemory->unsecurePointer() == nullptr) return 0;
         int32_t remoteRefcount =
                 reinterpret_cast<SharedControl *>(mMemory->unsecurePointer())->getRemoteRefcount();
-        // Sanity check so that remoteRefCount() is non-negative.
         return remoteRefcount >= 0 ? remoteRefcount : 0; // do not allow corrupted data.
 #else
         return 0;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index d372140..ce3b0d0 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -445,6 +445,12 @@
     int32_t mRotationDegrees;
     int32_t mAllowFrameDroppingBySurface;
 
+    uint32_t mHDRMetadataFlags; /* bitmask of kFlagHDR* */
+    enum {
+        kFlagHDRStaticInfo = 1 << 0,
+        kFlagHDR10PlusInfo = 1 << 1,
+    };
+
     // initial create parameters
     AString mInitName;
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 4237e8c..9040e8b 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -364,7 +364,7 @@
 inline static const char *asString_AV1Profile(int32_t i, const char *def = "??") {
     switch (i) {
         case AV1ProfileMain8:           return "Main8";
-        case AV1ProfileMain10:          return "Main10HDR";
+        case AV1ProfileMain10:          return "Main10";
         case AV1ProfileMain10HDR10:     return "Main10HDR10";
         case AV1ProfileMain10HDR10Plus: return "Main10HDRPlus";
         default:                        return def;
@@ -540,6 +540,9 @@
 constexpr int32_t DolbyVisionLevelUhd30   = 0x40;
 constexpr int32_t DolbyVisionLevelUhd48   = 0x80;
 constexpr int32_t DolbyVisionLevelUhd60   = 0x100;
+constexpr int32_t DolbyVisionLevelUhd120  = 0x200;
+constexpr int32_t DolbyVisionLevel8k30    = 0x400;
+constexpr int32_t DolbyVisionLevel8k60    = 0x800;
 
 inline static const char *asString_DolbyVisionLevel(int32_t i, const char *def = "??") {
     switch (i) {
@@ -552,6 +555,9 @@
         case DolbyVisionLevelUhd30: return "Uhd30";
         case DolbyVisionLevelUhd48: return "Uhd48";
         case DolbyVisionLevelUhd60: return "Uhd60";
+        case DolbyVisionLevelUhd120: return "Uhd120";
+        case DolbyVisionLevel8k30:  return "8k30";
+        case DolbyVisionLevel8k60:  return "8k60";
         default:                    return def;
     }
 }
@@ -586,9 +592,11 @@
 constexpr int32_t COLOR_Format24bitBGR888             = 12;
 constexpr int32_t COLOR_Format24bitRGB888             = 11;
 constexpr int32_t COLOR_Format25bitARGB1888           = 14;
+constexpr int32_t COLOR_Format32bitABGR2101010        = 0x7F00AAA2;
 constexpr int32_t COLOR_Format32bitABGR8888           = 0x7F00A000;
 constexpr int32_t COLOR_Format32bitARGB8888           = 16;
 constexpr int32_t COLOR_Format32bitBGRA8888           = 15;
+constexpr int32_t COLOR_Format64bitABGRFloat          = 0x7F000F16;
 constexpr int32_t COLOR_Format8bitRGB332              = 2;
 constexpr int32_t COLOR_FormatCbYCrY                  = 27;
 constexpr int32_t COLOR_FormatCrYCbY                  = 28;
@@ -642,9 +650,11 @@
         case COLOR_Format24bitBGR888:               return "24bitBGR888";
         case COLOR_Format24bitRGB888:               return "24bitRGB888";
         case COLOR_Format25bitARGB1888:             return "25bitARGB1888";
+        case COLOR_Format32bitABGR2101010:          return "32bitABGR2101010";
         case COLOR_Format32bitABGR8888:             return "32bitABGR8888";
         case COLOR_Format32bitARGB8888:             return "32bitARGB8888";
         case COLOR_Format32bitBGRA8888:             return "32bitBGRA8888";
+        case COLOR_Format64bitABGRFloat:            return "64bitABGRFloat";
         case COLOR_Format8bitRGB332:                return "8bitRGB332";
         case COLOR_FormatCbYCrY:                    return "CbYCrY";
         case COLOR_FormatCrYCbY:                    return "CrYCbY";
@@ -677,6 +687,7 @@
         case COLOR_FormatYUV422SemiPlanar:          return "YUV422SemiPlanar";
         case COLOR_FormatYUV444Flexible:            return "YUV444Flexible";
         case COLOR_FormatYUV444Interleaved:         return "YUV444Interleaved";
+        case COLOR_FormatYUVP010:                   return "YUVP010";
         case COLOR_QCOM_FormatYUV420SemiPlanar:     return "QCOM_YUV420SemiPlanar";
         case COLOR_TI_FormatYUV420PackedSemiPlanar: return "TI_YUV420PackedSemiPlanar";
         default:                                    return def;
@@ -684,6 +695,7 @@
 }
 
 constexpr char FEATURE_AdaptivePlayback[]       = "adaptive-playback";
+constexpr char FEATURE_EncodingStatistics[]     = "encoding-statistics";
 constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
 constexpr char FEATURE_PartialFrame[] = "partial-frame";
 constexpr char FEATURE_QpBounds[] = "qp-bounds";
@@ -737,6 +749,14 @@
 constexpr int32_t COLOR_TRANSFER_SDR_VIDEO = 3;
 constexpr int32_t COLOR_TRANSFER_ST2084 = 6;
 
+constexpr int32_t PICTURE_TYPE_I = 1;
+constexpr int32_t PICTURE_TYPE_P = 2;
+constexpr int32_t PICTURE_TYPE_B = 3;
+constexpr int32_t PICTURE_TYPE_UNKNOWN = 0;
+
+constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_1 = 1;
+constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_NONE = 0;
+
 constexpr char KEY_AAC_DRC_ALBUM_MODE[] = "aac-drc-album-mode";
 constexpr char KEY_AAC_DRC_ATTENUATION_FACTOR[] = "aac-drc-cut-level";
 constexpr char KEY_AAC_DRC_BOOST_FACTOR[] = "aac-drc-boost-level";
@@ -795,6 +815,7 @@
 constexpr char KEY_OPERATING_RATE[] = "operating-rate";
 constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
 constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
+constexpr char KEY_PICTURE_TYPE[] = "picture-type";
 constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
 constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
 constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
@@ -811,6 +832,8 @@
 constexpr char KEY_TILE_HEIGHT[] = "tile-height";
 constexpr char KEY_TILE_WIDTH[] = "tile-width";
 constexpr char KEY_TRACK_ID[] = "track-id";
+constexpr char KEY_VIDEO_ENCODING_STATISTICS_LEVEL[] = "video-encoding-statistics-level";
+constexpr char KEY_VIDEO_QP_AVERAGE[] = "video-qp-average";
 constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
 constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
 constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index d1df2ca..b91c850 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -163,11 +163,28 @@
             || (ERROR_DRM_VENDOR_MIN <= err && err <= ERROR_DRM_VENDOR_MAX);
 }
 
-static inline std::string StrCryptoError(status_t err) {
 #define STATUS_CASE(STATUS) \
     case STATUS:            \
         return #STATUS
 
+static inline std::string StrMediaError(status_t err) {
+    switch(err) {
+        STATUS_CASE(ERROR_ALREADY_CONNECTED);
+        STATUS_CASE(ERROR_NOT_CONNECTED);
+        STATUS_CASE(ERROR_UNKNOWN_HOST);
+        STATUS_CASE(ERROR_CANNOT_CONNECT);
+        STATUS_CASE(ERROR_IO);
+        STATUS_CASE(ERROR_CONNECTION_LOST);
+        STATUS_CASE(ERROR_MALFORMED);
+        STATUS_CASE(ERROR_OUT_OF_RANGE);
+        STATUS_CASE(ERROR_BUFFER_TOO_SMALL);
+        STATUS_CASE(ERROR_UNSUPPORTED);
+        STATUS_CASE(ERROR_END_OF_STREAM);
+    }
+    return statusToString(err);
+}
+
+static inline std::string StrCryptoError(status_t err) {
     switch (err) {
         STATUS_CASE(ERROR_DRM_UNKNOWN);
         STATUS_CASE(ERROR_DRM_NO_LICENSE);
@@ -209,10 +226,10 @@
         STATUS_CASE(ERROR_DRM_STORAGE_READ);
         STATUS_CASE(ERROR_DRM_STORAGE_WRITE);
         STATUS_CASE(ERROR_DRM_ZERO_SUBSAMPLES);
-#undef STATUS_CASE
     }
     return statusToString(err);
 }
+#undef STATUS_CASE
 
 }  // namespace android
 
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index c80012e..88c1f3f 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -60,6 +60,8 @@
     kKeyAVCC              = 'avcc',  // raw data
     kKeyHVCC              = 'hvcc',  // raw data
     kKeyDVCC              = 'dvcc',  // raw data
+    kKeyDVVC              = 'dvvc',  // raw data
+    kKeyDVWC              = 'dvwc',  // raw data
     kKeyAV1C              = 'av1c',  // raw data
     kKeyThumbnailHVCC     = 'thvc',  // raw data
     kKeyThumbnailAV1C     = 'tav1',  // raw data
@@ -283,6 +285,8 @@
     kTypeHVCC        = 'hvcc',
     kTypeAV1C        = 'av1c',
     kTypeDVCC        = 'dvcc',
+    kTypeDVVC        = 'dvvc',
+    kTypeDVWC        = 'dvwc',
     kTypeD263        = 'd263',
     kTypeHCOS        = 'hcos',
 };
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 9a7bad9..3c00a1c 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -87,7 +87,7 @@
         enabled: true,
     },
     double_loadable: true,
-    clang: true,
+
     cflags: [
         "-fvisibility=hidden",
         "-Werror=format",
diff --git a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
index 7bd33c1..847d324 100644
--- a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
@@ -204,7 +204,7 @@
 };
 
 bool AMPEG4ElementaryAssembler::initCheck() {
-    if(mSizeLength == 0 || mIndexLength == 0 || mIndexDeltaLength == 0) {
+    if(mIsGeneric && (mSizeLength == 0 || mIndexLength == 0 || mIndexDeltaLength == 0)) {
         android_errorWriteLog(0x534e4554, "124777537");
         return false;
     }
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 0097830..ea17a4d 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -86,9 +86,6 @@
     dictionary: "dictionaries/formats.dict",
     defaults: ["libstagefright_fuzzer_defaults"],
     static_libs: [
-        "libstagefright_webm",
         "libdatasource",
-        "libstagefright_esds",
-        "libogg",
     ],
 }
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
index 810ae95..2b2692f 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -65,8 +65,8 @@
     for (size_t i = 0; i < extractor->countTracks(); ++i) {
         sp<MetaData> meta = extractor->getTrackMetaData(i);
 
-        const char *trackMime;
-        if (!strcasecmp(mime.c_str(), trackMime)) {
+        std::string trackMime = dataProvider->PickValueInArray(kTestedMimeTypes);
+        if (!strcasecmp(mime.c_str(), trackMime.c_str())) {
             sp<IMediaSource> track = extractor->getTrack(i);
             if (track == NULL) {
                 return NULL;
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
index 98bfb94..6856ac0 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
@@ -42,6 +42,51 @@
     kMaxValue = MPEG2TS,
 };
 
+static std::string kTestedMimeTypes[] = {"audio/3gpp",
+                                         "audio/amr-wb",
+                                         "audio/vorbis",
+                                         "audio/opus",
+                                         "audio/mp4a-latm",
+                                         "audio/mpeg",
+                                         "audio/mpeg-L1",
+                                         "audio/mpeg-L2",
+                                         "audio/midi",
+                                         "audio/qcelp",
+                                         "audio/g711-alaw",
+                                         "audio/g711-mlaw",
+                                         "audio/flac",
+                                         "audio/aac-adts",
+                                         "audio/gsm",
+                                         "audio/ac3",
+                                         "audio/eac3",
+                                         "audio/eac3-joc",
+                                         "audio/ac4",
+                                         "audio/scrambled",
+                                         "audio/alac",
+                                         "audio/x-ms-wma",
+                                         "audio/x-adpcm-ms",
+                                         "audio/x-adpcm-dvi-ima",
+                                         "video/avc",
+                                         "video/hevc",
+                                         "video/mp4v-es",
+                                         "video/3gpp",
+                                         "video/x-vnd.on2.vp8",
+                                         "video/x-vnd.on2.vp9",
+                                         "video/av01",
+                                         "video/mpeg2",
+                                         "video/dolby-vision",
+                                         "video/scrambled",
+                                         "video/divx",
+                                         "video/divx3",
+                                         "video/xvid",
+                                         "video/x-motion-jpeg",
+                                         "text/3gpp-tt",
+                                         "application/x-subrip",
+                                         "text/vtt",
+                                         "text/cea-608",
+                                         "text/cea-708",
+                                         "application/x-id3v4"};
+
 std::string genMimeType(FuzzedDataProvider *dataProvider);
 sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, uint16_t dataAmount);
 sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize);
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index ac1e9b1..a8e64b6 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -393,3 +393,51 @@
     std::this_thread::sleep_for(std::chrono::milliseconds(100));
     looper->stop();
 }
+
+TEST(MediaCodecTest, DeadWhileStoppingError) {
+    // Test scenario:
+    //
+    // 1) Client thread calls stop(); MediaCodec looper thread calls
+    //    initiateShutdown(); shutdown is being handled at the component thread.
+    // 2) An error occurs while handling initiateShutdown().
+    // 3) MediaCodec looper thread handles the error.
+    // 4) Codec service dies after the error is handled.
+    // 5) MediaCodec looper thread handles the death.
+
+    static const AString kCodecName{"test.codec"};
+    static const AString kCodecOwner{"nobody"};
+    static const AString kMediaType{"video/x-test"};
+
+    sp<MockCodec> mockCodec;
+    std::function<sp<CodecBase>(const AString &name, const char *owner)> getCodecBase =
+        [&mockCodec](const AString &, const char *) {
+            mockCodec = new MockCodec([](const std::shared_ptr<MockBufferChannel> &) {
+                // No mock setup, as we don't expect any buffer operations
+                // in this scenario.
+            });
+            ON_CALL(*mockCodec, initiateAllocateComponent(_))
+                .WillByDefault([mockCodec](const sp<AMessage> &) {
+                    mockCodec->callback()->onComponentAllocated(kCodecName.c_str());
+                });
+            ON_CALL(*mockCodec, initiateShutdown(_))
+                .WillByDefault([mockCodec](bool) {
+                    // 2)
+                    mockCodec->callback()->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
+                    // 4)
+                    mockCodec->callback()->onError(DEAD_OBJECT, ACTION_CODE_FATAL);
+                    // Codec service has died, no callback.
+                });
+            return mockCodec;
+        };
+
+    sp<ALooper> looper{new ALooper};
+    sp<MediaCodec> codec = SetupMediaCodec(
+            kCodecOwner, kCodecName, kMediaType, looper, getCodecBase);
+    ASSERT_NE(nullptr, codec) << "Codec must not be null";
+    ASSERT_NE(nullptr, mockCodec) << "MockCodec must not be null";
+
+    codec->stop();
+    // sleep here so that the looper thread can handle the error
+    std::this_thread::sleep_for(std::chrono::milliseconds(100));
+    looper->stop();
+}
diff --git a/media/libstagefright/timedtext/test/Android.bp b/media/libstagefright/timedtext/test/Android.bp
index 0b632bf..60669f9 100644
--- a/media/libstagefright/timedtext/test/Android.bp
+++ b/media/libstagefright/timedtext/test/Android.bp
@@ -36,7 +36,6 @@
 
     static_libs: [
         "libstagefright_timedtext",
-        "libstagefright_foundation",
     ],
 
     include_dirs: [
@@ -47,6 +46,7 @@
         "liblog",
         "libmedia",
         "libbinder",
+        "libstagefright_foundation",
     ],
 
     cflags: [
diff --git a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
index ee7af70..b97f347 100644
--- a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
+++ b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
@@ -174,10 +174,13 @@
             params.sampleRate = 16000;
         } else {
             params.sampleRate = max(1, params.sampleRate);
+            params.channelCount = max(0, params.channelCount);
         }
         format->setInt32("channel-count", params.channelCount);
         format->setInt32("sample-rate", params.sampleRate);
     } else if (!strncmp(params.mime, "video/", 6)) {
+        params.width = max(1, params.width);
+        params.height = max(1, params.height);
         format->setInt32("width", params.width);
         format->setInt32("height", params.height);
     }
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 055dd80..afc873c 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -41,8 +41,6 @@
         "-Wall",
     ],
 
-    clang: true,
-
     sanitize: {
         misc_undefined: [
             "unsigned-integer-overflow",
diff --git a/media/libwatchdog/Android.bp b/media/libwatchdog/Android.bp
index 411c206..5506a73 100644
--- a/media/libwatchdog/Android.bp
+++ b/media/libwatchdog/Android.bp
@@ -39,7 +39,7 @@
         darwin: {
             enabled: false,
         },
-        linux_glibc: {
+        glibc: {
             cflags: [
                 "-Dsigev_notify_thread_id=_sigev_un._tid",
             ],
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index e25658f..d03746d 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -33,7 +33,7 @@
 
     shared_libs: [
         "android.hardware.media.omx@1.0",
-        "libandroidicu",
+        "libicu",
         "libfmq",
         "libbinder",
         "libhidlbase",
diff --git a/media/mtp/OWNERS b/media/mtp/OWNERS
index 1928ba8..54d3d4a 100644
--- a/media/mtp/OWNERS
+++ b/media/mtp/OWNERS
@@ -1,6 +1,5 @@
 set noparent
 
-marcone@google.com
 jsharkey@android.com
 jameswei@google.com
 rmojumder@google.com
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 8d527e9..94e5d1f 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -177,10 +177,6 @@
         "NdkMediaDataSourceCallbacks.cpp",
     ],
 
-    include_dirs: [
-        "frameworks/av/media/libstagefright/include",
-        "frameworks/av/media/ndk/include",
-    ],
 
     export_include_dirs: [
         "include",
@@ -193,6 +189,7 @@
     ],
 
     header_libs: [
+        "libstagefright_headers",
         "libmedia_headers",
     ],
 
@@ -223,6 +220,7 @@
         "libcutils",
         "android.hardware.graphics.bufferqueue@1.0",
     ],
+
     header_libs: [
         "libstagefright_foundation_headers",
     ],
@@ -230,9 +228,6 @@
     cflags: [
         "-D__ANDROID_VNDK__",
     ],
-    include_dirs: [
-        "frameworks/av/media/ndk/",
-    ],
 }
 
 cc_library_static {
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 1ae2b44..9e92ea6 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -158,8 +158,7 @@
                      }
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
-                     if (mCodec->mAsyncCallbackUserData != NULL
-                         || mCodec->mAsyncCallback.onAsyncInputAvailable != NULL) {
+                     if (mCodec->mAsyncCallback.onAsyncInputAvailable != NULL) {
                          mCodec->mAsyncCallback.onAsyncInputAvailable(
                                  mCodec,
                                  mCodec->mAsyncCallbackUserData,
@@ -205,8 +204,7 @@
                          (uint32_t)flags};
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
-                     if (mCodec->mAsyncCallbackUserData != NULL
-                         || mCodec->mAsyncCallback.onAsyncOutputAvailable != NULL) {
+                     if (mCodec->mAsyncCallback.onAsyncOutputAvailable != NULL) {
                          mCodec->mAsyncCallback.onAsyncOutputAvailable(
                                  mCodec,
                                  mCodec->mAsyncCallbackUserData,
@@ -234,8 +232,7 @@
                      AMediaFormat *aMediaFormat = AMediaFormat_fromMsg(&copy);
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
-                     if (mCodec->mAsyncCallbackUserData != NULL
-                         || mCodec->mAsyncCallback.onAsyncFormatChanged != NULL) {
+                     if (mCodec->mAsyncCallback.onAsyncFormatChanged != NULL) {
                          mCodec->mAsyncCallback.onAsyncFormatChanged(
                                  mCodec,
                                  mCodec->mAsyncCallbackUserData,
@@ -259,12 +256,11 @@
                          break;
                      }
                      msg->findString("detail", &detail);
-                     ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
-                           err, actionCode, detail.c_str());
+                     ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)",
+                           err, StrMediaError(err).c_str(), actionCode, detail.c_str());
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
-                     if (mCodec->mAsyncCallbackUserData != NULL
-                         || mCodec->mAsyncCallback.onAsyncError != NULL) {
+                     if (mCodec->mAsyncCallback.onAsyncError != NULL) {
                          mCodec->mAsyncCallback.onAsyncError(
                                  mCodec,
                                  mCodec->mAsyncCallbackUserData,
@@ -452,17 +448,19 @@
         uint32_t flags) {
     sp<AMessage> nativeFormat;
     AMediaFormat_getFormat(format, &nativeFormat);
-    ALOGV("configure with format: %s", nativeFormat->debugString(0).c_str());
+    // Create our own shallow copy so we aren't affected by any later changes.
+    sp<AMessage> dupNativeFormat = nativeFormat->dup();
+    ALOGV("configure with format: %s", dupNativeFormat->debugString(0).c_str());
     sp<Surface> surface = NULL;
     if (window != NULL) {
         surface = (Surface*) window;
     }
 
-    status_t err = mData->mCodec->configure(nativeFormat, surface,
+    status_t err = mData->mCodec->configure(dupNativeFormat, surface,
             crypto ? crypto->mCrypto : NULL, flags);
     if (err != OK) {
         ALOGE("configure: err(%d), failed with format: %s",
-              err, nativeFormat->debugString(0).c_str());
+              err, dupNativeFormat->debugString(0).c_str());
     }
     return translate_error(err);
 }
@@ -472,16 +470,20 @@
         AMediaCodec *mData,
         AMediaCodecOnAsyncNotifyCallback callback,
         void *userdata) {
-    if (mData->mAsyncNotify == NULL && userdata != NULL) {
-        mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
-        status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
-        if (err != OK) {
-            ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
-            return translate_error(err);
-        }
-    }
 
     Mutex::Autolock _l(mData->mAsyncCallbackLock);
+
+    if (mData->mAsyncNotify == NULL) {
+        mData->mAsyncNotify = new AMessage(kWhatAsyncNotify, mData->mHandler);
+    }
+
+    // Always set the callback: the codec may have been reset/re-configured since the last call.
+    status_t err = mData->mCodec->setCallback(mData->mAsyncNotify);
+    if (err != OK) {
+        ALOGE("setAsyncNotifyCallback: err(%d), failed to set async callback", err);
+        return translate_error(err);
+    }
+
     mData->mAsyncCallback = callback;
     mData->mAsyncCallbackUserData = userdata;
 
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index 69ab242..923453a 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -361,6 +361,7 @@
         "mpegh-reference-channel-layout";
 EXPORT const char* AMEDIAFORMAT_KEY_OPERATING_RATE = "operating-rate";
 EXPORT const char* AMEDIAFORMAT_KEY_PCM_ENCODING = "pcm-encoding";
+EXPORT const char* AMEDIAFORMAT_KEY_PICTURE_TYPE = "picture-type";
 EXPORT const char* AMEDIAFORMAT_KEY_PRIORITY = "priority";
 EXPORT const char* AMEDIAFORMAT_KEY_PROFILE = "profile";
 EXPORT const char* AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN = "pcm-big-endian";
@@ -394,6 +395,9 @@
 EXPORT const char* AMEDIAFORMAT_KEY_TRACK_ID = "track-id";
 EXPORT const char* AMEDIAFORMAT_KEY_TRACK_INDEX = "track-index";
 EXPORT const char* AMEDIAFORMAT_KEY_VALID_SAMPLES = "valid-samples";
+EXPORT const char* AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL =
+        "video-encoding-statistics-level";
+EXPORT const char* AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE = "video-qp-average";
 EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MAX = "video-qp-b-max";
 EXPORT const char* AMEDIAFORMAT_VIDEO_QP_B_MIN = "video-qp-b-min";
 EXPORT const char* AMEDIAFORMAT_VIDEO_QP_I_MAX = "video-qp-i-max";
diff --git a/media/ndk/OWNERS b/media/ndk/OWNERS
index 9dc441e..83644f0 100644
--- a/media/ndk/OWNERS
+++ b/media/ndk/OWNERS
@@ -1,3 +1,4 @@
-marcone@google.com
+essick@google.com
+lajos@google.com
 # For AImage/AImageReader
 include platform/frameworks/av:/camera/OWNERS
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 2d2fcc0..2195657 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -311,6 +311,10 @@
 extern const char* AMEDIAFORMAT_KEY_LAST_SAMPLE_INDEX_IN_CHUNK __INTRODUCED_IN(31);
 extern const char* AMEDIAFORMAT_KEY_SAMPLE_TIME_BEFORE_APPEND __INTRODUCED_IN(31);
 
+extern const char* AMEDIAFORMAT_KEY_PICTURE_TYPE __INTRODUCED_IN(33);
+extern const char* AMEDIAFORMAT_KEY_VIDEO_ENCODING_STATISTICS_LEVEL __INTRODUCED_IN(33);
+extern const char* AMEDIAFORMAT_KEY_VIDEO_QP_AVERAGE __INTRODUCED_IN(33);
+
 extern const char* AMEDIAFORMAT_VIDEO_QP_B_MAX __INTRODUCED_IN(31);
 extern const char* AMEDIAFORMAT_VIDEO_QP_B_MIN __INTRODUCED_IN(31);
 extern const char* AMEDIAFORMAT_VIDEO_QP_I_MAX __INTRODUCED_IN(31);
diff --git a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
index b17541d..75d73bf 100644
--- a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
+++ b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
@@ -30,7 +30,8 @@
         android:roundIcon="@mipmap/ic_launcher_round"
         android:supportsRtl="true"
         android:theme="@style/AppTheme">
-        <activity android:name="com.android.media.samplevideoencoder.MainActivity">
+        <activity android:name="com.android.media.samplevideoencoder.MainActivity"
+            android:exported="true">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
                 <category android:name="android.intent.category.LAUNCHER" />
@@ -42,4 +43,4 @@
         android:targetPackage="com.android.media.samplevideoencoder"
         android:label="SampleVideoEncoder Test"/>
 
-</manifest>
\ No newline at end of file
+</manifest>
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index 2e06da5..4b44dcf 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -69,7 +69,6 @@
 java_defaults {
     name: "MediaBenchmark-defaults",
 
-    sdk_version: "system_current",
     min_sdk_version: "28",
-    target_sdk_version: "29",
+    target_sdk_version: "30",
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/build.gradle b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
index b2aee1a..b222d47 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/build.gradle
+++ b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
@@ -17,21 +17,21 @@
 buildscript {
     repositories {
         google()
-        jcenter()
+        mavenCentral()
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle:3.5.0'
+        classpath 'com.android.tools.build:gradle:4.2.1'
     }
 }
 
 apply plugin: 'com.android.application'
 
 android {
-    compileSdkVersion 29
+    compileSdkVersion 30
     defaultConfig {
         applicationId "com.android.media.benchmark"
         minSdkVersion 28
-        targetSdkVersion 29
+        targetSdkVersion 30
         versionCode 1
         versionName "1.0"
         testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
@@ -57,20 +57,20 @@
     externalNativeBuild {
         cmake {
             path "src/main/cpp/CMakeLists.txt"
-            version "3.10.2"
+            version "3.18.1"
         }
     }
 }
 
 repositories {
     google()
-    jcenter()
+    mavenCentral()
 }
 
 dependencies {
     implementation fileTree(dir: 'libs', include: ['*.jar'])
-    implementation 'androidx.appcompat:appcompat:1.1.0'
-    testImplementation 'junit:junit:4.12'
-    androidTestImplementation 'androidx.test:runner:1.2.0'
-    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+    implementation 'androidx.appcompat:appcompat:1.3.0'
+    testImplementation 'junit:junit:4.13.2'
+    androidTestImplementation 'androidx.test:runner:1.3.0'
+    androidTestImplementation 'androidx.test.ext:junit:1.1.2'
 }
\ No newline at end of file
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
index af92424..0192d68 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
@@ -9,7 +9,6 @@
 
 cc_test_library {
     name: "libmediabenchmark_jni",
-    sdk_version: "current",
 
     defaults: [
         "libmediabenchmark_common-defaults",
diff --git a/media/tests/benchmark/src/native/common/Android.bp b/media/tests/benchmark/src/native/common/Android.bp
index 6b54c6a..718d217 100644
--- a/media/tests/benchmark/src/native/common/Android.bp
+++ b/media/tests/benchmark/src/native/common/Android.bp
@@ -55,7 +55,6 @@
 
 cc_defaults {
     name: "libmediabenchmark-defaults",
-    sdk_version: "current",
     stl: "c++_shared",
 
     shared_libs: [
diff --git a/media/tests/benchmark/src/native/extractor/Extractor.cpp b/media/tests/benchmark/src/native/extractor/Extractor.cpp
index f0bb3b9..3bdfbad 100644
--- a/media/tests/benchmark/src/native/extractor/Extractor.cpp
+++ b/media/tests/benchmark/src/native/extractor/Extractor.cpp
@@ -124,9 +124,7 @@
 
     int64_t sTime = mStats->getCurTime();
     if (mExtractor) {
-        // TODO: (b/140128505) Multiple calls result in DoS.
-        // Uncomment call to AMediaExtractor_delete() once this is resolved
-        // AMediaExtractor_delete(mExtractor);
+        AMediaExtractor_delete(mExtractor);
         mExtractor = nullptr;
     }
     int64_t eTime = mStats->getCurTime();
diff --git a/media/tests/benchmark/tests/Android.bp b/media/tests/benchmark/tests/Android.bp
index 0fbd20d..9a8caa3 100644
--- a/media/tests/benchmark/tests/Android.bp
+++ b/media/tests/benchmark/tests/Android.bp
@@ -33,7 +33,12 @@
 
     srcs: ["ExtractorTest.cpp"],
 
-    static_libs: ["libmediabenchmark_extractor"]
+    static_libs: ["libmediabenchmark_extractor"],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+    ],
 }
 
 cc_test {
@@ -50,6 +55,11 @@
         "libmediabenchmark_extractor",
         "libmediabenchmark_decoder",
     ],
+
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+    ],
 }
 
 cc_test {
diff --git a/media/tests/benchmark/tests/DecoderTest.cpp b/media/tests/benchmark/tests/DecoderTest.cpp
index 81ef02a..3666724 100644
--- a/media/tests/benchmark/tests/DecoderTest.cpp
+++ b/media/tests/benchmark/tests/DecoderTest.cpp
@@ -21,6 +21,8 @@
 #include <iostream>
 #include <limits>
 
+#include <android/binder_process.h>
+
 #include "BenchmarkTestEnvironment.h"
 #include "Decoder.h"
 
@@ -175,6 +177,7 @@
                                             "c2.android.hevc.decoder", true)));
 
 int main(int argc, char **argv) {
+    ABinderProcess_startThreadPool();
     gEnv = new BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/tests/benchmark/tests/ExtractorTest.cpp b/media/tests/benchmark/tests/ExtractorTest.cpp
index d14d15b..27ee9ba 100644
--- a/media/tests/benchmark/tests/ExtractorTest.cpp
+++ b/media/tests/benchmark/tests/ExtractorTest.cpp
@@ -19,6 +19,8 @@
 
 #include <gtest/gtest.h>
 
+#include <android/binder_process.h>
+
 #include "BenchmarkTestEnvironment.h"
 #include "Extractor.h"
 
@@ -73,6 +75,7 @@
                                                      0)));
 
 int main(int argc, char **argv) {
+    ABinderProcess_startThreadPool();
     gEnv = new BenchmarkTestEnvironment();
     ::testing::AddGlobalTestEnvironment(gEnv);
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 73c4e3b..88b822d 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -51,6 +51,7 @@
         "libpermission",
         "android.hardware.graphics.bufferqueue@1.0",
         "android.hidl.token@1.0-utils",
+        "packagemanager_aidl-cpp",
     ],
     export_static_lib_headers: [
         "libbatterystats_aidl",
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 9c7b863..42f48a5 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -101,7 +101,11 @@
     AttributionSourceState myAttributionSource;
     myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
     myAttributionSource.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
-    myAttributionSource.token = sp<BBinder>::make();
+    if (callerAttributionSource.token != nullptr) {
+        myAttributionSource.token = callerAttributionSource.token;
+    } else {
+        myAttributionSource.token = sp<BBinder>::make();
+    }
     myAttributionSource.next.push_back(nextAttributionSource);
 
     return std::optional<AttributionSourceState>{myAttributionSource};
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index c1698dc..c4dc24f 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -18,6 +18,7 @@
         "libutils",
         "libbinder",
         "framework-permission-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
 
     cflags: [
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
index 130feee..32fc3be 100644
--- a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -34,11 +34,16 @@
     const sp<IServiceManager> sm(defaultServiceManager());
     if (sm != nullptr) {
         const String16 name("batterystats");
-        batteryStatService = checked_interface_cast<IBatteryStats>(sm->checkService(name));
-        if (batteryStatService == nullptr) {
+        sp<IBinder> obj = sm->checkService(name);
+        if (!obj) {
             ALOGW("batterystats service unavailable!");
             return nullptr;
         }
+        batteryStatService = checked_interface_cast<IBatteryStats>(obj);
+        if (batteryStatService == nullptr) {
+            ALOGW("batterystats service interface is invalid");
+            return nullptr;
+        }
     }
     return batteryStatService;
 }
diff --git a/services/Android.mk b/services/Android.mk
new file mode 100644
index 0000000..c86a226
--- /dev/null
+++ b/services/Android.mk
@@ -0,0 +1 @@
+$(eval $(call declare-1p-copy-files,frameworks/av/services/audiopolicy,))
diff --git a/services/OWNERS b/services/OWNERS
index f0b5e2f..17e605d 100644
--- a/services/OWNERS
+++ b/services/OWNERS
@@ -1,9 +1,6 @@
-chz@google.com
 elaurent@google.com
 essick@google.com
 etalvala@google.com
-gkasten@google.com
 hunga@google.com
-marcone@google.com
 nchalko@google.com
 quxiangfang@google.com
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index b91f302..fecc183 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -81,6 +81,7 @@
         "libmedia_helper",
         "libshmemcompat",
         "libvibrator",
+        "packagemanager_aidl-cpp",
     ],
 
     static_libs: [
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3a3fb5e..51f39a6 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -336,6 +336,24 @@
     return NO_ERROR;
 }
 
+status_t AudioFlinger::setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) {
+    status_t final_result = NO_INIT;
+    Mutex::Autolock _l(mLock);
+    AutoMutex lock(mHardwareLock);
+    mHardwareStatus = AUDIO_HW_SET_CONNECTED_STATE;
+    for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+        sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+        status_t result = dev->setConnectedState(port, connected);
+        // Same logic as with setParameter: it's a success if at least one
+        // HAL module accepts the update.
+        if (final_result != NO_ERROR) {
+            final_result = result;
+        }
+    }
+    mHardwareStatus = AUDIO_HW_IDLE;
+    return final_result;
+}
+
 // getDefaultVibratorInfo_l must be called with AudioFlinger lock held.
 std::optional<media::AudioVibratorInfo> AudioFlinger::getDefaultVibratorInfo_l() {
     if (mAudioVibratorInfos.empty()) {
@@ -998,8 +1016,9 @@
                 }
             }
         }
-
-        setAudioHwSyncForSession_l(thread, sessionId);
+        if ((output.flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == AUDIO_OUTPUT_FLAG_HW_AV_SYNC) {
+            setAudioHwSyncForSession_l(thread, sessionId);
+        }
     }
 
     if (lStatus != NO_ERROR) {
@@ -4212,6 +4231,7 @@
         case TransactionCode::SET_AUDIO_PORT_CONFIG:
         case TransactionCode::SET_RECORD_SILENCED:
         case TransactionCode::AUDIO_POLICY_READY:
+        case TransactionCode::SET_DEVICE_CONNECTED_STATE:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, code, IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 8fcd6e4..d2317e8 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -282,6 +282,8 @@
     virtual status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs);
 
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected);
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
         const std::function<status_t()>& delegate) override;
 
@@ -904,6 +906,7 @@
         AUDIO_HW_SET_MASTER_MUTE,       // set_master_mute
         AUDIO_HW_GET_MASTER_MUTE,       // get_master_mute
         AUDIO_HW_GET_MICROPHONES,       // getMicrophones
+        AUDIO_HW_SET_CONNECTED_STATE,   // setConnectedState
     };
 
     mutable     hardware_call_state                 mHardwareStatus;    // for dump only
diff --git a/services/audioflinger/OWNERS b/services/audioflinger/OWNERS
index 034d161..17d4c37 100644
--- a/services/audioflinger/OWNERS
+++ b/services/audioflinger/OWNERS
@@ -1,4 +1,4 @@
-gkasten@google.com
 hunga@google.com
 jmtrivi@google.com
 mnaganov@google.com
+philburk@google.com
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 93118b8..45dd258 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -575,6 +575,12 @@
 
     // create a special playback track to render to playback thread.
     // this track is given the same buffer as the PatchRecord buffer
+
+    // The default behavior is to start as soon as possible to have the lowest possible latency,
+    // even if it might glitch.
+    // Disable this behavior for the FM Tuner source if no fast capture/mixer is available.
+    const bool isFmBridge = mAudioPatch.sources[0].ext.device.type == AUDIO_DEVICE_IN_FM_TUNER;
+    const size_t frameCountToBeReady = isFmBridge && !usePassthruPatchRecord ? frameCount / 4 : 1;
     sp<PlaybackThread::PatchTrack> tempPatchTrack = new PlaybackThread::PatchTrack(
                                            mPlayback.thread().get(),
                                            streamType,
@@ -584,7 +590,9 @@
                                            frameCount,
                                            tempRecordTrack->buffer(),
                                            tempRecordTrack->bufferSize(),
-                                           outputFlags);
+                                           outputFlags,
+                                           {} /*timeout*/,
+                                           frameCountToBeReady);
     status = mPlayback.checkTrack(tempPatchTrack.get());
     if (status != NO_ERROR) {
         return status;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 3cce998..aecd4d3 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -193,6 +193,12 @@
        }
     }
 
+    static bool checkServerLatencySupported(
+            audio_format_t format, audio_output_flags_t flags) {
+        return audio_is_linear_pcm(format)
+                && (flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) == 0;
+    }
+
     audio_output_flags_t getOutputFlags() const { return mFlags; }
     float getSpeed() const { return mSpeed; }
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index e0da037..09e4078 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -5879,6 +5879,20 @@
     return trackCount;
 }
 
+bool AudioFlinger::PlaybackThread::checkRunningTimestamp()
+{
+    uint64_t position = 0;
+    struct timespec unused;
+    const status_t ret = mOutput->getPresentationPosition(&position, &unused);
+    if (ret == NO_ERROR) {
+        if (position != mLastCheckedTimestampPosition) {
+            mLastCheckedTimestampPosition = position;
+            return true;
+        }
+    }
+    return false;
+}
+
 // isTrackAllowed_l() must be called with ThreadBase::mLock held
 bool AudioFlinger::MixerThread::isTrackAllowed_l(
         audio_channel_mask_t channelMask, audio_format_t format,
@@ -6307,19 +6321,24 @@
                 // fill a buffer, then remove it from active list.
                 // Only consider last track started for mixer state control
                 if (--(track->mRetryCount) <= 0) {
-                    ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
-                    tracksToRemove->add(track);
-                    // indicate to client process that the track was disabled because of underrun;
-                    // it will then automatically call start() when data is available
-                    track->disable();
-                    // only do hw pause when track is going to be removed due to BUFFER TIMEOUT.
-                    // unlike mixerthread, HAL can be paused for direct output
-                    ALOGW("pause because of UNDERRUN, framesReady = %zu,"
-                            "minFrames = %u, mFormat = %#x",
-                            framesReady, minFrames, mFormat);
-                    if (last && mHwSupportsPause && !mHwPaused && !mStandby) {
-                        doHwPause = true;
-                        mHwPaused = true;
+                    const bool running = checkRunningTimestamp();
+                    if (running) { // still running, give us more time.
+                        track->mRetryCount = kMaxTrackRetriesOffload;
+                    } else {
+                        ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
+                        tracksToRemove->add(track);
+                        // indicate to client process that the track was disabled because of
+                        // underrun; it will then automatically call start() when data is available
+                        track->disable();
+                        // only do hw pause when track is going to be removed due to BUFFER TIMEOUT.
+                        // unlike mixerthread, HAL can be paused for direct output
+                        ALOGW("pause because of UNDERRUN, framesReady = %zu,"
+                                "minFrames = %u, mFormat = %#x",
+                                framesReady, minFrames, mFormat);
+                        if (last && mHwSupportsPause && !mHwPaused && !mStandby) {
+                            doHwPause = true;
+                            mHwPaused = true;
+                        }
                     }
                 } else if (last) {
                     mixerStatus = MIXER_TRACKS_ENABLED;
@@ -6530,6 +6549,7 @@
 
 void AudioFlinger::DirectOutputThread::flushHw_l()
 {
+    PlaybackThread::flushHw_l();
     mOutput->flush();
     mHwPaused = false;
     mFlushPending = false;
@@ -6665,8 +6685,7 @@
 AudioFlinger::OffloadThread::OffloadThread(const sp<AudioFlinger>& audioFlinger,
         AudioStreamOut* output, audio_io_handle_t id, bool systemReady)
     :   DirectOutputThread(audioFlinger, output, id, OFFLOAD, systemReady),
-        mPausedWriteLength(0), mPausedBytesRemaining(0), mKeepWakeLock(true),
-        mOffloadUnderrunPosition(~0LL)
+        mPausedWriteLength(0), mPausedBytesRemaining(0), mKeepWakeLock(true)
 {
     //FIXME: mStandby should be set to true by ThreadBase constructo
     mStandby = true;
@@ -6883,19 +6902,7 @@
                 // No buffers for this track. Give it a few chances to
                 // fill a buffer, then remove it from active list.
                 if (--(track->mRetryCount) <= 0) {
-                    bool running = false;
-                    uint64_t position = 0;
-                    struct timespec unused;
-                    // The running check restarts the retry counter at least once.
-                    status_t ret = mOutput->stream->getPresentationPosition(&position, &unused);
-                    if (ret == NO_ERROR && position != mOffloadUnderrunPosition) {
-                        running = true;
-                        mOffloadUnderrunPosition = position;
-                    }
-                    if (ret == NO_ERROR) {
-                        ALOGVV("underrun counter, running(%d): %lld vs %lld", running,
-                                (long long)position, (long long)mOffloadUnderrunPosition);
-                    }
+                    const bool running = checkRunningTimestamp();
                     if (running) { // still running, give us more time.
                         track->mRetryCount = kMaxTrackRetriesOffload;
                     } else {
@@ -6966,7 +6973,6 @@
     mPausedBytesRemaining = 0;
     // reset bytes written count to reflect that DSP buffers are empty after flush.
     mBytesWritten = 0;
-    mOffloadUnderrunPosition = ~0LL;
 
     if (mUseAsyncWrite) {
         // discard any pending drain or write ack by incrementing sequence
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 8561de3..04ad20e 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -1375,6 +1375,14 @@
                 struct audio_patch mDownStreamPatch;
 
                 std::atomic_bool mCheckOutputStageEffects{};
+
+                // A differential check on the timestamps to see if the timestamp frame position
+                // has changed since the last call to checkRunningTimestamp().
+                uint64_t mLastCheckedTimestampPosition = ~0LL;
+
+                bool checkRunningTimestamp();
+
+    virtual     void flushHw_l() { mLastCheckedTimestampPosition = ~0LL; }
 };
 
 class MixerThread : public PlaybackThread {
@@ -1492,7 +1500,7 @@
     virtual     bool        checkForNewParameter_l(const String8& keyValuePair,
                                                    status_t& status);
 
-    virtual     void        flushHw_l();
+                void        flushHw_l() override;
 
                 void        setMasterBalance(float balance) override;
 
@@ -1557,7 +1565,7 @@
     OffloadThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
                   audio_io_handle_t id, bool systemReady);
     virtual                 ~OffloadThread() {};
-    virtual     void        flushHw_l();
+                void        flushHw_l() override;
 
 protected:
     // threadLoop snippets
@@ -1574,10 +1582,6 @@
     size_t      mPausedWriteLength;     // length in bytes of write interrupted by pause
     size_t      mPausedBytesRemaining;  // bytes still waiting in mixbuffer after resume
     bool        mKeepWakeLock;          // keep wake lock while waiting for write callback
-    uint64_t    mOffloadUnderrunPosition; // Current frame position for offloaded playback
-                                          // used and valid only during underrun.  ~0 if
-                                          // no underrun has occurred during playback and
-                                          // is not reset on standby.
 };
 
 class AsyncCallbackThread : public Thread {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index e0c5fa5..233865f 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -713,8 +713,7 @@
         thread->mFastTrackAvailMask &= ~(1 << i);
     }
 
-    mServerLatencySupported = thread->type() == ThreadBase::MIXER
-            || thread->type() == ThreadBase::DUPLICATING;
+    mServerLatencySupported = checkServerLatencySupported(format, flags);
 #ifdef TEE_SINK
     mTee.setId(std::string("_") + std::to_string(mThreadIoHandle)
             + "_" + std::to_string(mId) + "_T");
@@ -1405,6 +1404,60 @@
             .content_type = mAttr.content_type,
             .gain = mFinalVolume,
     };
+
+    // When attributes are undefined, derive default values from stream type.
+    // See AudioAttributes.java, usageForStreamType() and Builder.setInternalLegacyStreamType()
+    if (mAttr.usage == AUDIO_USAGE_UNKNOWN) {
+        switch (mStreamType) {
+        case AUDIO_STREAM_VOICE_CALL:
+            metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_SYSTEM:
+            metadata.base.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_RING:
+            metadata.base.usage = AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_MUSIC:
+            metadata.base.usage = AUDIO_USAGE_MEDIA;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+            break;
+        case AUDIO_STREAM_ALARM:
+            metadata.base.usage = AUDIO_USAGE_ALARM;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_NOTIFICATION:
+            metadata.base.usage = AUDIO_USAGE_NOTIFICATION;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_DTMF:
+            metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_ACCESSIBILITY:
+            metadata.base.usage = AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_ASSISTANT:
+            metadata.base.usage = AUDIO_USAGE_ASSISTANT;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_REROUTING:
+            metadata.base.usage = AUDIO_USAGE_VIRTUAL_SOURCE;
+            // unknown content type
+            break;
+        case AUDIO_STREAM_CALL_ASSISTANT:
+            metadata.base.usage = AUDIO_USAGE_CALL_ASSISTANT;
+            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        default:
+            break;
+        }
+    }
+
     metadata.channel_mask = mChannelMask,
     strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
     *backInserter++ = metadata;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 4078278..39c3dc5 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -285,8 +285,8 @@
 
     virtual bool     isHapticPlaybackSupported() = 0;
 
-    virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
-                std::vector<audio_format_t> *formats) = 0;
+    virtual status_t getHwOffloadFormatsSupportedForBluetoothMedia(
+                audio_devices_t device, std::vector<audio_format_t> *formats) = 0;
 
     virtual void     setAppState(audio_port_handle_t portId, app_state_t state) = 0;
 
@@ -504,6 +504,8 @@
 
     virtual status_t updateSecondaryOutputs(
             const TrackSecondaryOutputsMap& trackSecondaryOutputs) = 0;
+
+    virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/common/include/Volume.h b/services/audiopolicy/common/include/Volume.h
index 736f8b2..f0636a0 100644
--- a/services/audiopolicy/common/include/Volume.h
+++ b/services/audiopolicy/common/include/Volume.h
@@ -127,6 +127,7 @@
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
         case AUDIO_DEVICE_OUT_USB_HEADSET:
         case AUDIO_DEVICE_OUT_BLE_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
             return DEVICE_CATEGORY_HEADSET;
         case AUDIO_DEVICE_OUT_HEARING_AID:
             return DEVICE_CATEGORY_HEARING_AID;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 7c7f02d..8aab634 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -158,7 +158,7 @@
     virtual bool isDuplicated() const { return false; }
     virtual uint32_t latency() { return 0; }
     virtual bool isFixedVolume(const DeviceTypeSet& deviceTypes);
-    virtual bool setVolume(float volumeDb,
+    virtual bool setVolume(float volumeDb, bool muted,
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
@@ -352,7 +352,22 @@
             setClientActive(client, false);
         }
     }
-    virtual bool setVolume(float volumeDb,
+
+    /**
+     * @brief setSwMute: for a SwOutput routed on a device that supports HW gain, this function
+     * allows muting only the tracks associated with a given volume source.
+     * As an output may host one or more sources, and as AudioPolicyManager may or may not dispatch
+     * the volume change request (according to the priority of the volume source controlling the
+     * single HW gain controller), a separate API is needed to force a mute/unmute of a volume source.
+     * @param muted true to mute, false otherwise
+     * @param vs volume source to be considered
+     * @param device device type(s) the change is scoped to
+     * @param delayMs potentially applied to prevent cut sounds.
+     */
+    void setSwMute(bool muted, VolumeSource vs, const StreamTypeVector &streams,
+                   const DeviceTypeSet& device, uint32_t delayMs);
+
+    virtual bool setVolume(float volumeDb, bool muted,
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& device,
                            uint32_t delayMs,
@@ -437,7 +452,7 @@
 
             void dump(String8 *dst) const override;
 
-    virtual bool setVolume(float volumeDb,
+    virtual bool setVolume(float volumeDb, bool muted,
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index f3d2326..5c3bdb3 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -155,7 +155,7 @@
     return false;
 }
 
-bool AudioOutputDescriptor::setVolume(float volumeDb,
+bool AudioOutputDescriptor::setVolume(float volumeDb, bool /*muted*/,
                                       VolumeSource volumeSource,
                                       const StreamTypeVector &/*streams*/,
                                       const DeviceTypeSet& deviceTypes,
@@ -435,14 +435,36 @@
             mFlags & AUDIO_OUTPUT_FLAG_FAST ? AUDIO_LATENCY_LOW : AUDIO_LATENCY_NORMAL;
 }
 
-bool SwAudioOutputDescriptor::setVolume(float volumeDb,
+void SwAudioOutputDescriptor::setSwMute(
+        bool muted, VolumeSource vs, const StreamTypeVector &streamTypes,
+        const DeviceTypeSet& deviceTypes, uint32_t delayMs) {
+    // volume source active and more than one volume source is active, otherwise, no-op or let
+    // setVolume controlling SW and/or HW Gains
+    if (!streamTypes.empty() && isActive(vs) && (getActiveVolumeSources().size() > 1)) {
+        for (const auto& devicePort : devices()) {
+            if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
+                    devicePort->hasGainController(true /*canUseForVolume*/)) {
+                float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
+                ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
+                      mIoHandle, vs, muted, getActiveVolumeSources().size());
+                for (const auto &stream : streamTypes) {
+                    mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                }
+                return;
+            }
+        }
+    }
+}
+
+bool SwAudioOutputDescriptor::setVolume(float volumeDb, bool muted,
                                         VolumeSource vs, const StreamTypeVector &streamTypes,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
                                         bool force)
 {
     StreamTypeVector streams = streamTypes;
-    if (!AudioOutputDescriptor::setVolume(volumeDb, vs, streamTypes, deviceTypes, delayMs, force)) {
+    if (!AudioOutputDescriptor::setVolume(
+            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force)) {
         return false;
     }
     if (streams.empty()) {
@@ -459,11 +481,17 @@
             // different Volume Source (or if we allow several curves within same volume group)
             //
             // @todo: default stream volume to max (0) when using HW Port gain?
-            float volumeAmpl = Volume::DbToAmpl(0);
-            for (const auto &stream : streams) {
-                mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+            // Set the SW gain on AudioFlinger only if:
+            //    - the volume group has explicit stream(s) associated with it, or
+            //    - the volume group with no explicit stream(s) is the only active source on this output
+            // Mute the SW gain on AudioFlinger only for a volume group with explicit stream(s).
+            if (!streamTypes.empty() || (getActiveVolumeSources().size() == 1)) {
+                const bool canMute = muted && (volumeDb != 0.0f) && !streamTypes.empty();
+                float volumeAmpl = canMute ? 0.0f : Volume::DbToAmpl(0);
+                for (const auto &stream : streams) {
+                    mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                }
             }
-
             AudioGains gains = devicePort->getGains();
             int gainMinValueInMb = gains[0]->getMinValueInMb();
             int gainMaxValueInMb = gains[0]->getMaxValueInMb();
@@ -698,14 +726,14 @@
 }
 
 
-bool HwAudioOutputDescriptor::setVolume(float volumeDb,
+bool HwAudioOutputDescriptor::setVolume(float volumeDb, bool muted,
                                         VolumeSource volumeSource, const StreamTypeVector &streams,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
                                         bool force)
 {
     bool changed = AudioOutputDescriptor::setVolume(
-            volumeDb, volumeSource, streams, deviceTypes, delayMs, force);
+            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force);
 
     if (changed) {
       // TODO: use gain controller on source device if any to adjust volume
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 1722032..c9c8ede 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -58,15 +58,6 @@
         mDeclaredAddress(DeviceDescriptorBase::address())
 {
     mCurrentEncodedFormat = AUDIO_FORMAT_DEFAULT;
-    /* If framework runs against a pre 5.0 Audio HAL, encoded formats are absent from the config.
-     * FIXME: APM should know the version of the HAL and don't add the formats for V5.0.
-     * For now, the workaround to remove AC3 and IEC61937 support on HDMI is to declare
-     * something like 'encodedFormats="AUDIO_FORMAT_PCM_16_BIT"' on the HDMI devicePort.
-     */
-    if (mDeviceTypeAddr.mType == AUDIO_DEVICE_OUT_HDMI && mEncodedFormats.empty()) {
-        mEncodedFormats.push_back(AUDIO_FORMAT_AC3);
-        mEncodedFormats.push_back(AUDIO_FORMAT_IEC61937);
-    }
 }
 
 void DeviceDescriptor::attach(const sp<HwModule>& module)
diff --git a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
index 8c61b90..5986069 100644
--- a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
@@ -30,9 +30,9 @@
 // --- PolicyAudioPort class implementation
 void PolicyAudioPort::attach(const sp<HwModule>& module)
 {
+    mModule = module;
     ALOGV("%s: attaching module %s to port %s",
             __FUNCTION__, getModuleName(), asAudioPort()->getName().c_str());
-    mModule = module;
 }
 
 void PolicyAudioPort::detach()
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 84ed656..a631963 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -861,10 +861,10 @@
         ALOGE("%s: No version found in root node %s", __func__, rootName);
         return BAD_VALUE;
     }
-    if (version == "7.0") {
+    if (version == "7.0" || version == "7.1") {
         mChannelMasksSeparator = mSamplingRatesSeparator = mFlagsSeparator = " ";
     } else if (version != "1.0") {
-        ALOGE("%s: Version does not match; expected \"1.0\" or \"7.0\" got \"%s\"",
+        ALOGE("%s: Version does not match; expected \"1.0\", \"7.0\", or \"7.1\" got \"%s\"",
                 __func__, version.c_str());
         return BAD_VALUE;
     }
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
index 98415b7..d34cca0 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
@@ -11,16 +11,17 @@
                      channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
         <!-- Le Audio Audio Ports -->
-        <mixPort name="le audio output" role="source">
+        <mixPort name="le audio output" role="source"/>
+        <mixPort name="le audio input" role="sink">
             <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                      samplingRates="8000,16000,24000,32000,44100,48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
             <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
                      samplingRates="8000,16000,24000,32000,44100,48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
             <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
                      samplingRates="8000,16000,24000,32000,44100,48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
         </mixPort>
     </mixPorts>
     <devicePorts>
@@ -49,6 +50,7 @@
         -->
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+        <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -61,6 +63,8 @@
                sources="hearing aid output"/>
         <route type="mix" sink="BLE Headset Out"
                sources="le audio output"/>
+        <route type="mix" sink="le audio input"
+               sources="BLE Headset In"/>
         <route type="mix" sink="BLE Speaker Out"
                sources="le audio output"/>
     </routes>
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
index fbe7571..ef92d08 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -11,16 +11,17 @@
                      channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
         <!-- Le Audio Audio Ports -->
-        <mixPort name="le audio output" role="source">
+        <mixPort name="le audio output" role="source"/>
+        <mixPort name="le audio input" role="sink">
             <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                      samplingRates="8000 16000 24000 32000 44100 48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
             <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
                      samplingRates="8000 16000 24000 32000 44100 48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
             <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
                      samplingRates="8000 16000 24000 32000 44100 48000"
-                     channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
+                     channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
         </mixPort>
     </mixPorts>
     <devicePorts>
@@ -45,6 +46,7 @@
         <!-- BLE Audio Ports -->
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+        <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BT A2DP Out"
@@ -57,6 +59,8 @@
                sources="hearing aid output"/>
         <route type="mix" sink="BLE Headset Out"
                sources="le audio output"/>
+        <route type="mix" sink="le audio input"
+               sources="BLE Headset In"/>
         <route type="mix" sink="BLE Speaker Out"
                sources="le audio output"/>
     </routes>
diff --git a/services/audiopolicy/config/le_audio_policy_configuration.xml b/services/audiopolicy/config/le_audio_policy_configuration.xml
index a3dc72b..dcdd805 100644
--- a/services/audiopolicy/config/le_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/le_audio_policy_configuration.xml
@@ -7,13 +7,20 @@
                      samplingRates="8000,16000,24000,32000,44100,48000"
                      channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
         </mixPort>
+        <mixPort name="le audio input" role="sink">
+            <profile name="" format="AUDIO_FORMAT_PCM_16_BIT,AUDIO_FORMAT_PCM_24_BIT,AUDIO_FORMAT_PCM_32_BIT"
+                     samplingRates="8000,16000,24000,32000,44100,48000"
+                     channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO"/>
+        </mixPort>
     </mixPorts>
     <devicePorts>
         <devicePort tagName="BLE Headset Out" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"/>
         <devicePort tagName="BLE Speaker Out" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"/>
+        <devicePort tagName="BLE Headset In" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source"/>
     </devicePorts>
     <routes>
         <route type="mix" sink="BLE Headset Out" sources="le audio output"/>
         <route type="mix" sink="BLE Speaker Out" sources="le audio output"/>
+        <route type="mix" sink="le audio input" sources="BLE Headset In"/>
     </routes>
 </module>
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index d39eff6..665c2dd 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -69,12 +69,6 @@
           {
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION, AUDIO_SOURCE_DEFAULT,
                AUDIO_FLAG_NONE, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
-               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
-               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
-              {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
-               AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""},
               {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_EVENT,
                AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""}
           }
diff --git a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
index b3f8947..06cc799 100644
--- a/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
+++ b/services/audiopolicy/engine/common/src/LastRemovableMediaDevices.cpp
@@ -85,6 +85,7 @@
     case AUDIO_DEVICE_OUT_HEARING_AID:
     case AUDIO_DEVICE_OUT_BLE_HEADSET:
     case AUDIO_DEVICE_OUT_BLE_SPEAKER:
+    case AUDIO_DEVICE_OUT_BLE_BROADCAST:
         return GROUP_BT_A2DP;
     default:
         return GROUP_NONE;
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
index bc32416..0ddf66d 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -57,9 +57,6 @@
     <ProductStrategy name="STRATEGY_SONIFICATION_RESPECTFUL">
         <AttributesGroup streamType="AUDIO_STREAM_NOTIFICATION" volumeGroup="notification">
             <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_EVENT"/> </Attributes>
         </AttributesGroup>
     </ProductStrategy>
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index b0c376a..9a61a05 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -299,8 +299,13 @@
     if (device != nullptr) {
         return DeviceVector(device);
     }
+    return fromCache? getCachedDevices(strategy) : getDevicesForProductStrategy(strategy);
+}
 
-    return fromCache? mDevicesForStrategies.at(strategy) : getDevicesForProductStrategy(strategy);
+DeviceVector Engine::getCachedDevices(product_strategy_t ps) const
+{
+    return mDevicesForStrategies.find(ps) != mDevicesForStrategies.end() ?
+                mDevicesForStrategies.at(ps) : DeviceVector{};
 }
 
 DeviceVector Engine::getOutputDevicesForStream(audio_stream_type_t stream, bool fromCache) const
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index d8e2742..f665da5 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -126,6 +126,7 @@
     status_t loadAudioPolicyEngineConfig();
 
     DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
+    DeviceVector getCachedDevices(product_strategy_t ps) const;
 
     /**
      * Policy Parameter Manager hidden through a wrapper.
diff --git a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
index bc32416..0ddf66d 100644
--- a/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
+++ b/services/audiopolicy/enginedefault/config/example/phone/audio_policy_engine_product_strategies.xml
@@ -57,9 +57,6 @@
     <ProductStrategy name="STRATEGY_SONIFICATION_RESPECTFUL">
         <AttributesGroup streamType="AUDIO_STREAM_NOTIFICATION" volumeGroup="notification">
             <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT"/> </Attributes>
-            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED"/> </Attributes>
             <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_EVENT"/> </Attributes>
         </AttributesGroup>
     </ProductStrategy>
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index faf15d6..9f6b703 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -62,4 +62,7 @@
         "libaudiopolicymanager_interface_headers",
     ],
     data: [":audiopolicyfuzzer_configuration_files"],
+    fuzz_config: {
+       cc: ["mnaganov@google.com"],
+    },
 }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index e334532..3cfb944 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -112,11 +112,14 @@
 void AudioPolicyManager::broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
                                                         audio_policy_dev_state_t state)
 {
-    AudioParameter param(String8(device->address().c_str()));
-    const String8 key(state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE ?
-                AudioParameter::keyDeviceConnect : AudioParameter::keyDeviceDisconnect);
-    param.addInt(key, device->type());
-    mpClientInterface->setParameters(AUDIO_IO_HANDLE_NONE, param.toString());
+    audio_port_v7 devicePort;
+    device->toAudioPort(&devicePort);
+    if (status_t status = mpClientInterface->setDeviceConnectedState(
+                    &devicePort, state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE);
+            status != OK) {
+        ALOGE("Error %d while setting connected state for device %s", status,
+                device->getDeviceTypeAddr().toString(false).c_str());
+    }
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t deviceType,
@@ -525,10 +528,10 @@
     return NO_ERROR;
 }
 
-status_t AudioPolicyManager::getHwOffloadEncodingFormatsSupportedForA2DP(
-                                    std::vector<audio_format_t> *formats)
+status_t AudioPolicyManager::getHwOffloadFormatsSupportedForBluetoothMedia(
+                                    audio_devices_t device, std::vector<audio_format_t> *formats)
 {
-    ALOGV("getHwOffloadEncodingFormatsSupportedForA2DP()");
+    ALOGV("getHwOffloadFormatsSupportedForBluetoothMedia()");
     status_t status = NO_ERROR;
     std::unordered_set<audio_format_t> formatSet;
     sp<HwModule> primaryModule =
@@ -537,8 +540,23 @@
         ALOGE("%s() unable to get primary module", __func__);
         return NO_INIT;
     }
+
+    DeviceTypeSet audioDeviceSet;
+
+    switch(device) {
+    case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+        audioDeviceSet = getAudioDeviceOutAllA2dpSet();
+        break;
+    case AUDIO_DEVICE_OUT_BLE_HEADSET:
+        audioDeviceSet = getAudioDeviceOutAllBleSet();
+        break;
+    default:
+        ALOGE("%s() device type 0x%08x not supported", __func__, device);
+        return BAD_VALUE;
+    }
+
     DeviceVector declaredDevices = primaryModule->getDeclaredDevices().getDevicesFromTypes(
-            getAudioDeviceOutAllA2dpSet());
+            audioDeviceSet);
     for (const auto& device : declaredDevices) {
         formatSet.insert(device->encodedFormats().begin(), device->encodedFormats().end());
     }
@@ -1827,7 +1845,7 @@
     if (stream == AUDIO_STREAM_TTS) {
         ALOGV("\t found BEACON stream");
         if (!mTtsOutputAvailable && mOutputs.isAnyOutputActive(
-                                    toVolumeSource(AUDIO_STREAM_TTS) /*sourceToIgnore*/)) {
+                                    toVolumeSource(AUDIO_STREAM_TTS, false) /*sourceToIgnore*/)) {
             return INVALID_OPERATION;
         } else {
             beaconMuteLatency = handleEventForBeacon(STARTING_BEACON);
@@ -2049,12 +2067,20 @@
         if (outputDesc->getActivityCount(clientVolSrc) == 0 || forceDeviceUpdate) {
             outputDesc->setStopTime(client, systemTime());
             DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
+
+            // If the routing does not change, but the output is routed on a device using HwGain
+            // (aka setAudioPortConfig) and there are still active clients following different
+            // volume group(s), force reapplying the volume
+            bool requiresVolumeCheck = outputDesc->getActivityCount(clientVolSrc) == 0 &&
+                    outputDesc->useHwGain() && outputDesc->isAnyActive(VOLUME_SOURCE_NONE);
+
             // delay the device switch by twice the latency because stopOutput() is executed when
             // the track stop() command is received and at that time the audio track buffer can
             // still contain data that needs to be drained. The latency only covers the audio HAL
             // and kernel buffers. Also the latency does not always include additional delay in the
             // audio path (audio DSP, CODEC ...)
-            setOutputDevices(outputDesc, newDevices, false, outputDesc->latency()*2);
+            setOutputDevices(outputDesc, newDevices, false, outputDesc->latency()*2,
+                             nullptr, true /*requiresMuteCheck*/, requiresVolumeCheck);
 
             // force restoring the device selection on other active outputs if it differs from the
             // one being selected for this output
@@ -2182,7 +2208,7 @@
     }
 
     // Explicit routing?
-    sp<DeviceDescriptor> explicitRoutingDevice = 
+    sp<DeviceDescriptor> explicitRoutingDevice =
             mAvailableInputDevices.getDeviceFromId(*selectedDeviceId);
 
     // special case for mmap capture: if an input IO handle is specified, we reuse this input if
@@ -2368,7 +2394,7 @@
             profileFlags = AUDIO_INPUT_FLAG_NONE; // retry
         } else { // fail
             ALOGW("%s could not find profile for device %s, sampling rate %u, format %#x, "
-                  "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(), 
+                  "channel mask 0x%X, flags %#x", __func__, device->toString().c_str(),
                   config->sample_rate, config->format, config->channel_mask, flags);
             return input;
         }
@@ -2810,6 +2836,8 @@
         // HW Gain management, do not change the volume
         if (desc->useHwGain()) {
             applyVolume = false;
+            // If the volume source is active together with a higher priority source, ensure it is at least SW muted
+            desc->setSwMute((index == 0), vs, curves.getStreamTypes(), curDevices, 0 /*delayMs*/);
             for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
                 auto activeClients = desc->clientsList(true /*activeOnly*/, productStrategy,
                                                        false /*preferredDevice*/);
@@ -2849,7 +2877,7 @@
         // handled by system UI
         status_t volStatus = checkAndSetVolume(
                     curves, vs, index, desc, curDevices,
-                    ((vs == toVolumeSource(AUDIO_STREAM_SYSTEM))?
+                    ((vs == toVolumeSource(AUDIO_STREAM_SYSTEM, false))?
                          TOUCH_SOUND_FIXED_DELAY_MS : 0));
         if (volStatus != NO_ERROR) {
             status = volStatus;
@@ -3051,12 +3079,14 @@
 
 bool AudioPolicyManager::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
 {
-    return mOutputs.isActive(toVolumeSource(stream), inPastMs);
+    auto vs = toVolumeSource(stream, false);
+    return vs != VOLUME_SOURCE_NONE ? mOutputs.isActive(vs, inPastMs) : false;
 }
 
 bool AudioPolicyManager::isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const
 {
-    return mOutputs.isActiveRemotely(toVolumeSource(stream), inPastMs);
+    auto vs = toVolumeSource(stream, false);
+    return vs != VOLUME_SOURCE_NONE ? mOutputs.isActiveRemotely(vs, inPastMs) : false;
 }
 
 bool AudioPolicyManager::isSourceActive(audio_source_t source) const
@@ -5189,9 +5219,8 @@
             continue;
         }
         mHwModules.push_back(hwModule);
-        // open all output streams needed to access attached devices
-        // except for direct output streams that are only opened when they are actually
-        // required by an app.
+        // Open all output streams needed to access attached devices.
+        // Direct outputs are closed immediately after checking the availability of attached devices.
         // This also validates mAvailableOutputDevices list
         for (const auto& outProfile : hwModule->getOutputProfiles()) {
             if (!outProfile->canOpenNewIo()) {
@@ -5588,7 +5617,7 @@
             } // endif input != 0
 
             if (input == AUDIO_IO_HANDLE_NONE) {
-                ALOGW("%s could not open input for device %s", __func__,  
+                ALOGW("%s could not open input for device %s", __func__,
                        device->toString().c_str());
                 profiles.removeAt(profile_index);
                 profile_index--;
@@ -5957,14 +5986,20 @@
                     client->getSecondaryOutputs().begin(),
                     client->getSecondaryOutputs().end(),
                     secondaryDescs.begin(), secondaryDescs.end())) {
-                std::vector<wp<SwAudioOutputDescriptor>> weakSecondaryDescs;
-                std::vector<audio_io_handle_t> secondaryOutputIds;
-                for (const auto& secondaryDesc : secondaryDescs) {
-                    secondaryOutputIds.push_back(secondaryDesc->mIoHandle);
-                    weakSecondaryDescs.push_back(secondaryDesc);
+                if (!audio_is_linear_pcm(client->config().format)) {
+                    // If the format is not PCM, the tracks should be invalidated to get correct
+                    // behavior when the secondary output is changed.
+                    streamsToInvalidate.insert(client->stream());
+                } else {
+                    std::vector<wp<SwAudioOutputDescriptor>> weakSecondaryDescs;
+                    std::vector<audio_io_handle_t> secondaryOutputIds;
+                    for (const auto &secondaryDesc: secondaryDescs) {
+                        secondaryOutputIds.push_back(secondaryDesc->mIoHandle);
+                        weakSecondaryDescs.push_back(secondaryDesc);
+                    }
+                    trackSecondaryOutputs.emplace(client->portId(), secondaryOutputIds);
+                    client->setSecondaryOutputs(std::move(weakSecondaryDescs));
                 }
-                trackSecondaryOutputs.emplace(client->portId(), secondaryOutputIds);
-                client->setSecondaryOutputs(std::move(weakSecondaryDescs));
             }
         }
     }
@@ -6078,9 +6113,10 @@
 
         auto doGetOutputDevicesForVoice = [&]() {
             return hasVoiceStream(streams) && (outputDesc == mPrimaryOutput ||
-                outputDesc->isActive(toVolumeSource(AUDIO_STREAM_VOICE_CALL))) &&
+                outputDesc->isActive(toVolumeSource(AUDIO_STREAM_VOICE_CALL, false))) &&
                 (isInCall() ||
-                 mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc));
+                 mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc)) &&
+                !isStreamActive(AUDIO_STREAM_ENFORCED_AUDIBLE, 0);
         };
 
         // With low-latency playing on speaker, music on WFD, when the first low-latency
@@ -6173,7 +6209,7 @@
         devices.merge(curDevices);
         for (audio_io_handle_t output : getOutputsForDevices(curDevices, mOutputs)) {
             sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output);
-            if (outputDesc->isActive(toVolumeSource(curStream))) {
+            if (outputDesc->isActive(toVolumeSource(curStream, false))) {
                 activeDevices.merge(outputDesc->devices());
             }
         }
@@ -6274,7 +6310,11 @@
         // mute/unmute AUDIO_STREAM_TTS on all outputs
         ALOGV("\t muting %d", mute);
         uint32_t maxLatency = 0;
-        auto ttsVolumeSource = toVolumeSource(AUDIO_STREAM_TTS);
+        auto ttsVolumeSource = toVolumeSource(AUDIO_STREAM_TTS, false);
+        if (ttsVolumeSource == VOLUME_SOURCE_NONE) {
+            ALOGV("\t no tts volume source available");
+            return 0;
+        }
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             setVolumeSourceMute(ttsVolumeSource, mute/*on*/, desc, 0 /*delay*/, DeviceTypeSet());
@@ -6384,7 +6424,7 @@
                                               bool force,
                                               int delayMs,
                                               audio_patch_handle_t *patchHandle,
-                                              bool requiresMuteCheck)
+                                              bool requiresMuteCheck, bool requiresVolumeCheck)
 {
     ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
     uint32_t muteWaitMs;
@@ -6400,6 +6440,7 @@
     // filter devices according to output selected
     DeviceVector filteredDevices = outputDesc->filterSupportedDevices(devices);
     DeviceVector prevDevices = outputDesc->devices();
+    DeviceVector availPrevDevices = mAvailableOutputDevices.filter(prevDevices);
 
     ALOGV("setOutputDevices() prevDevice %s", prevDevices.toString().c_str());
 
@@ -6418,8 +6459,7 @@
     // no need to proceed if new device is not AUDIO_DEVICE_NONE and not supported by current
     // output profile or if new device is not supported AND previous device(s) is(are) still
     // available (otherwise reset device must be done on the output)
-    if (!devices.isEmpty() && filteredDevices.isEmpty() &&
-            !mAvailableOutputDevices.filter(prevDevices).empty()) {
+    if (!devices.isEmpty() && filteredDevices.isEmpty() && !availPrevDevices.empty()) {
         ALOGV("%s: unsupported device %s for output", __func__, devices.toString().c_str());
         // restore previous device after evaluating strategy mute state
         outputDesc->setDevices(prevDevices);
@@ -6433,16 +6473,20 @@
     //  AND the output is connected by a valid audio patch.
     // Doing this check here allows the caller to call setOutputDevices() without conditions
     if ((filteredDevices.isEmpty() || filteredDevices == prevDevices) &&
-            !force && outputDesc->getPatchHandle() != 0) {
+            !force && outputDesc->getPatchHandle() != AUDIO_PATCH_HANDLE_NONE) {
         ALOGV("%s setting same device %s or null device, force=%d, patch handle=%d", __func__,
               filteredDevices.toString().c_str(), force, outputDesc->getPatchHandle());
+        if (requiresVolumeCheck && !filteredDevices.isEmpty()) {
+            ALOGV("%s setting same device on routed output, force apply volumes", __func__);
+            applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs, true /*force*/);
+        }
         return muteWaitMs;
     }
 
     ALOGV("%s changing device to %s", __func__, filteredDevices.toString().c_str());
 
     // do the routing
-    if (filteredDevices.isEmpty()) {
+    if (filteredDevices.isEmpty() || mAvailableOutputDevices.filter(filteredDevices).empty()) {
         resetOutputDevice(outputDesc, delayMs, NULL);
     } else {
         PatchBuilder patchBuilder;
@@ -6606,11 +6650,11 @@
     // louder than the accessibility prompt, the prompt cannot be heard, thus masking the touch
     // exploration of the dialer UI. In this situation, bring the accessibility volume closer to
     // the ringtone volume
-    const auto callVolumeSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL);
-    const auto ringVolumeSrc = toVolumeSource(AUDIO_STREAM_RING);
-    const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC);
-    const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM);
-    const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY);
+    const auto callVolumeSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
+    const auto ringVolumeSrc = toVolumeSource(AUDIO_STREAM_RING, false);
+    const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC, false);
+    const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM, false);
+    const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY, false);
 
     if (volumeSource == a11yVolumeSrc
             && (AUDIO_MODE_RINGTONE == mEngine->getPhoneState()) &&
@@ -6623,12 +6667,12 @@
     // in-call: always cap volume by voice volume + some low headroom
     if ((volumeSource != callVolumeSrc && (isInCall() ||
                                            mOutputs.isActiveLocally(callVolumeSrc))) &&
-            (volumeSource == toVolumeSource(AUDIO_STREAM_SYSTEM) ||
+            (volumeSource == toVolumeSource(AUDIO_STREAM_SYSTEM, false) ||
              volumeSource == ringVolumeSrc || volumeSource == musicVolumeSrc ||
              volumeSource == alarmVolumeSrc ||
-             volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION) ||
-             volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE) ||
-             volumeSource == toVolumeSource(AUDIO_STREAM_DTMF) ||
+             volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION, false) ||
+             volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE, false) ||
+             volumeSource == toVolumeSource(AUDIO_STREAM_DTMF, false) ||
              volumeSource == a11yVolumeSrc)) {
         auto &voiceCurves = getVolumeCurves(callVolumeSrc);
         int voiceVolumeIndex = voiceCurves.getVolumeIndex(deviceTypes);
@@ -6666,9 +6710,9 @@
              AUDIO_DEVICE_OUT_BLE_HEADSET}).empty() &&
             ((volumeSource == alarmVolumeSrc ||
               volumeSource == ringVolumeSrc) ||
-             (volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION)) ||
-             (volumeSource == toVolumeSource(AUDIO_STREAM_SYSTEM)) ||
-             ((volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE)) &&
+             (volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION, false)) ||
+             (volumeSource == toVolumeSource(AUDIO_STREAM_SYSTEM, false)) ||
+             ((volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE, false)) &&
               (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_NONE))) &&
             curves.canBeMuted()) {
 
@@ -6754,10 +6798,10 @@
                outputDesc->getMuteCount(volumeSource), outputDesc->isActive(volumeSource));
         return NO_ERROR;
     }
-    VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL);
-    VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO);
-    bool isVoiceVolSrc = callVolSrc == volumeSource;
-    bool isBtScoVolSrc = btScoVolSrc == volumeSource;
+    VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
+    VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
+    bool isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+    bool isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
 
     bool isScoRequested = isScoRequestedForComm();
     // do not change in call volume if bluetooth is connected and vice versa
@@ -6782,8 +6826,9 @@
                     isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device))) {
         volumeDb = 0.0f;
     }
+    const bool muted = (index == 0) && (volumeDb != 0.0f);
     outputDesc->setVolume(
-            volumeDb, volumeSource, curves.getStreamTypes(), deviceTypes, delayMs, force);
+            volumeDb, muted, volumeSource, curves.getStreamTypes(), deviceTypes, delayMs, force);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
         float voiceVolume;
@@ -6825,8 +6870,10 @@
     for (auto attributes: mEngine->getAllAttributesForProductStrategy(strategy)) {
         ALOGVV("%s() attributes %s, mute %d, output ID %d", __func__,
                toString(attributes).c_str(), on, outputDesc->getId());
-        VolumeSource source = toVolumeSource(attributes);
-        if (std::find(begin(sourcesToMute), end(sourcesToMute), source) == end(sourcesToMute)) {
+        VolumeSource source = toVolumeSource(attributes, false);
+        if ((source != VOLUME_SOURCE_NONE) &&
+                (std::find(begin(sourcesToMute), end(sourcesToMute), source)
+                        == end(sourcesToMute))) {
             sourcesToMute.push_back(source);
         }
     }
@@ -6849,7 +6896,7 @@
     if (on) {
         if (!outputDesc->isMuted(volumeSource)) {
             if (curves.canBeMuted() &&
-                    (volumeSource != toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE) ||
+                    (volumeSource != toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE, false) ||
                      (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) ==
                       AUDIO_POLICY_FORCE_NONE))) {
                 checkAndSetVolume(curves, volumeSource, 0, outputDesc, deviceTypes, delayMs);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 967aa10..dcd12cd 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -320,8 +320,8 @@
                                                     audio_format_t *surroundFormats);
         virtual status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
 
-        virtual status_t getHwOffloadEncodingFormatsSupportedForA2DP(
-                    std::vector<audio_format_t> *formats);
+        virtual status_t getHwOffloadFormatsSupportedForBluetoothMedia(
+                    audio_devices_t device, std::vector<audio_format_t> *formats);
 
         virtual void setAppState(audio_port_handle_t portId, app_state_t state);
 
@@ -431,13 +431,30 @@
         {
             return static_cast<VolumeSource>(volumeGroup);
         }
-        VolumeSource toVolumeSource(const audio_attributes_t &attributes) const
+        /**
+         * @brief toVolumeSource converts audio attributes into a volume source
+         * (either a legacy stream or a volume group). If fallback on default is allowed and the
+         * audio attributes do not match any specific product strategy rule, they are associated
+         * with the default volume source, e.g. music. Thus, any call of the volume APIs
+         * using this translation function may affect the default volume source.
+         * If fallback is not allowed and no matching rule is identified for the given attributes,
+         * the volume source is undefined and no volume will be altered/modified.
+         * @param attributes to be considered
+         * @param fallbackOnDefault whether to fall back on the default volume source (music)
+         * @return volume source associated with the given attributes, otherwise music if
+         * fallbackOnDefault is set, or none.
+         */
+        VolumeSource toVolumeSource(
+            const audio_attributes_t &attributes, bool fallbackOnDefault = true) const
         {
-            return toVolumeSource(mEngine->getVolumeGroupForAttributes(attributes));
+            return toVolumeSource(mEngine->getVolumeGroupForAttributes(
+                attributes, fallbackOnDefault));
         }
-        VolumeSource toVolumeSource(audio_stream_type_t stream) const
+        VolumeSource toVolumeSource(
+            audio_stream_type_t stream, bool fallbackOnDefault = true) const
         {
-            return toVolumeSource(mEngine->getVolumeGroupForStreamType(stream));
+            return toVolumeSource(mEngine->getVolumeGroupForStreamType(
+                stream, fallbackOnDefault));
         }
         IVolumeCurves &getVolumeCurves(VolumeSource volumeSource)
         {
@@ -463,14 +480,27 @@
         void removeOutput(audio_io_handle_t output);
         void addInput(audio_io_handle_t input, const sp<AudioInputDescriptor>& inputDesc);
 
-        // change the route of the specified output. Returns the number of ms we have slept to
-        // allow new routing to take effect in certain cases.
+        /**
+         * @brief setOutputDevices changes the route of the specified output.
+         * @param outputDesc to be considered
+         * @param device to be considered to route the output
+         * @param force if true, force the routing even if nothing changed.
+         * @param delayMs if specified, delay to apply for mute/volume ops when changing device
+         * @param patchHandle if specified, the patch handle this output is connected through.
+         * @param requiresMuteCheck if true, e.g. when another output is active on a shared device,
+         *        allows a proper drain and avoids pops
+         * @param requiresVolumeCheck true if the caller requires the volume to be reapplied when
+         * the routing did not change (but the output is still routed).
+         * @return the number of ms we have slept to allow new routing to take effect in certain
+         * cases.
+         */
         uint32_t setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
                                   const DeviceVector &device,
                                   bool force = false,
                                   int delayMs = 0,
                                   audio_patch_handle_t *patchHandle = NULL,
-                                  bool requiresMuteCheck = true);
+                                  bool requiresMuteCheck = true,
+                                  bool requiresVolumeCheck = false);
         status_t resetOutputDevice(const sp<AudioOutputDescriptor>& outputDesc,
                                    int delayMs = 0,
                                    audio_patch_handle_t *patchHandle = NULL);
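For illustration, a minimal caller-side sketch of how the new fallbackOnDefault flag documented
above is intended to be used; the surrounding function name is hypothetical and not part of the
patch, but the guard mirrors the checks added to isStreamActive() and isStreamActiveRemotely()
in the AudioPolicyManager.cpp hunks earlier:

    bool isStreamActiveNoFallback(audio_stream_type_t stream, uint32_t inPastMs) const
    {
        // With fallback disabled, a stream that matches no volume group yields
        // VOLUME_SOURCE_NONE: bail out instead of silently acting on the default
        // (music) volume source.
        auto vs = toVolumeSource(stream, false /*fallbackOnDefault*/);
        return vs != VOLUME_SOURCE_NONE ? mOutputs.isActive(vs, inPastMs) : false;
    }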
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 197f183..f3d4f2f 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -55,6 +55,7 @@
         "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
         "framework-permission-aidl-cpp",
+        "packagemanager_aidl-cpp",
         "spatializer-aidl-cpp",
     ],
 
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 79252d4..863ad56 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -305,4 +305,15 @@
     return af->updateSecondaryOutputs(trackSecondaryOutputs);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setDeviceConnectedState(
+        const struct audio_port_v7 *port, bool connected) {
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        ALOGW("%s: could not get AudioFlinger", __func__);
+        return PERMISSION_DENIED;
+    }
+    return af->setDeviceConnectedState(port, connected);
+}
+
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 4bd1260..ff1e674 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -1885,8 +1885,8 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getHwOffloadEncodingFormatsSupportedForA2DP(
-        std::vector<media::audio::common::AudioFormat>* _aidl_return) {
+Status AudioPolicyService::getHwOffloadFormatsSupportedForBluetoothMedia(
+        int32_t deviceAidl, std::vector<media::audio::common::AudioFormat>* _aidl_return) {
     std::vector<audio_format_t> formats;
 
     if (mAudioPolicyManager == NULL) {
@@ -1894,8 +1894,10 @@
     }
     Mutex::Autolock _l(mLock);
     AutoCallerClear acc;
+    audio_devices_t device = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_int32_t_audio_devices_t(deviceAidl));
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            mAudioPolicyManager->getHwOffloadEncodingFormatsSupportedForA2DP(&formats)));
+            mAudioPolicyManager->getHwOffloadFormatsSupportedForBluetoothMedia(device, &formats)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
             convertContainer<std::vector<media::audio::common::AudioFormat>>(
                     formats,
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 102b376..cd83900 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -476,7 +476,7 @@
     }
 }
 
-void AudioPolicyService::NotificationClient::onAudioVolumeGroupChanged(volume_group_t group, 
+void AudioPolicyService::NotificationClient::onAudioVolumeGroupChanged(volume_group_t group,
                                                                       int flags)
 {
     if (mAudioPolicyServiceClient != 0 && mAudioVolumeGroupCallbacksEnabled) {
@@ -1037,7 +1037,7 @@
         case TRANSACTION_removeUidDeviceAffinities:
         case TRANSACTION_setUserIdDeviceAffinities:
         case TRANSACTION_removeUserIdDeviceAffinities:
-        case TRANSACTION_getHwOffloadEncodingFormatsSupportedForA2DP:
+        case TRANSACTION_getHwOffloadFormatsSupportedForBluetoothMedia:
         case TRANSACTION_listAudioVolumeGroups:
         case TRANSACTION_getVolumeGroupFromAudioAttributes:
         case TRANSACTION_acquireSoundTriggerSession:
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 27c4e1c..1a0f838 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -197,8 +197,8 @@
                                       std::vector<bool>* formatsEnabled) override;
     binder::Status getReportedSurroundFormats(
             media::Int* count, std::vector<media::audio::common::AudioFormat>* formats) override;
-    binder::Status getHwOffloadEncodingFormatsSupportedForA2DP(
-            std::vector<media::audio::common::AudioFormat>* _aidl_return) override;
+    binder::Status getHwOffloadFormatsSupportedForBluetoothMedia(
+            int32_t device, std::vector<media::audio::common::AudioFormat>* _aidl_return) override;
     binder::Status setSurroundFormatEnabled(media::audio::common::AudioFormat audioFormat,
                                             bool enabled) override;
     binder::Status setAssistantUid(int32_t uid) override;
@@ -781,6 +781,9 @@
         status_t updateSecondaryOutputs(
                 const TrackSecondaryOutputsMap& trackSecondaryOutputs) override;
 
+        status_t setDeviceConnectedState(
+                const struct audio_port_v7 *port, bool connected) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 84b40d2..adef8f1 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -103,6 +103,11 @@
         ++mAudioPortListUpdateCount;
     }
 
+    status_t setDeviceConnectedState(
+            const struct audio_port_v7 *port __unused, bool connected __unused) override {
+        return NO_ERROR;
+    }
+
     // Helper methods for tests
     size_t getActivePatchesCount() const { return mActivePatches.size(); }
 
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 4e0735b..da85658 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -96,6 +96,10 @@
             const TrackSecondaryOutputsMap& trackSecondaryOutputs __unused) override {
         return NO_INIT;
     }
+    status_t setDeviceConnectedState(
+            const struct audio_port_v7 *port __unused, bool connected __unused) override {
+        return NO_INIT;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index a16ab7d..9d2d2b3 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1375,7 +1375,8 @@
     if (type == AUDIO_DEVICE_OUT_HDMI) {
         // Set device connection state failed due to no device descriptor found
         // For HDMI case, it is easier to simulate device descriptor not found error
-        // by using a undeclared encoded format.
+        // by using an encoded format which isn't listed in the 'encodedFormats'
+        // attribute for this devicePort.
         ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
                 type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                 address.c_str(), name.c_str(), AUDIO_FORMAT_MAT_2_1));
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 87f0ab9..41ed70c 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -50,7 +50,8 @@
                 </devicePort>
                 <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
                 </devicePort>
-                <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink">
+                <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink"
+                            encodedFormats="AUDIO_FORMAT_AC3">
                 </devicePort>
                 <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
                 </devicePort>
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 4dfbb6f..229964c 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -363,16 +363,18 @@
     std::string cameraId(id.c_str());
     hardware::camera::common::V1_0::CameraResourceCost cost;
     status_t res = mCameraProviderManager->getResourceCost(cameraId, &cost);
-    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
     if (res != OK) {
         ALOGE("Failed to query device resource cost: %s (%d)", strerror(-res), res);
         return;
     }
+    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
     res = mCameraProviderManager->getSystemCameraKind(cameraId, &deviceKind);
     if (res != OK) {
         ALOGE("Failed to query device kind: %s (%d)", strerror(-res), res);
         return;
     }
+    std::vector<std::string> physicalCameraIds;
+    mCameraProviderManager->isLogicalCamera(cameraId, &physicalCameraIds);
     std::set<String8> conflicting;
     for (size_t i = 0; i < cost.conflictingDevices.size(); i++) {
         conflicting.emplace(String8(cost.conflictingDevices[i].c_str()));
@@ -381,7 +383,7 @@
     {
         Mutex::Autolock lock(mCameraStatesLock);
         mCameraStates.emplace(id, std::make_shared<CameraState>(id, cost.resourceCost,
-                                                                conflicting, deviceKind));
+                conflicting, deviceKind, physicalCameraIds));
     }
 
     if (mFlashlight->hasFlashUnit(id)) {
@@ -560,6 +562,13 @@
     onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
 }
 
+
+void CameraService::onTorchStatusChanged(const String8& cameraId,
+        TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
+    Mutex::Autolock al(mTorchStatusMutex);
+    onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
+}
+
 void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
@@ -3682,9 +3691,10 @@
 // ----------------------------------------------------------------------------
 
 CameraService::CameraState::CameraState(const String8& id, int cost,
-        const std::set<String8>& conflicting, SystemCameraKind systemCameraKind) : mId(id),
+        const std::set<String8>& conflicting, SystemCameraKind systemCameraKind,
+        const std::vector<std::string>& physicalCameras) : mId(id),
         mStatus(StatusInternal::NOT_PRESENT), mCost(cost), mConflicting(conflicting),
-        mSystemCameraKind(systemCameraKind) {}
+        mSystemCameraKind(systemCameraKind), mPhysicalCameras(physicalCameras) {}
 
 CameraService::CameraState::~CameraState() {}
 
@@ -3723,6 +3733,11 @@
     return mSystemCameraKind;
 }
 
+bool CameraService::CameraState::containsPhysicalCamera(const std::string& physicalCameraId) const {
+    return std::find(mPhysicalCameras.begin(), mPhysicalCameras.end(), physicalCameraId)
+            != mPhysicalCameras.end();
+}
+
 bool CameraService::CameraState::addUnavailablePhysicalId(const String8& physicalId) {
     Mutex::Autolock lock(mStatusLock);
     auto result = mUnavailablePhysicalIds.insert(physicalId);
@@ -4341,18 +4356,9 @@
     std::list<String16> retList;
     Mutex::Autolock lock(mCameraStatesLock);
     for (const auto& state : mCameraStates) {
-        std::vector<std::string> physicalCameraIds;
-        if (!mCameraProviderManager->isLogicalCamera(state.first.c_str(), &physicalCameraIds)) {
-            // This is not a logical multi-camera.
-            continue;
+        if (state.second->containsPhysicalCamera(physicalCameraId.c_str())) {
+            retList.emplace_back(String16(state.first));
         }
-        if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(), physicalCameraId.c_str())
-                == physicalCameraIds.end()) {
-            // cameraId is not a physical camera of this logical multi-camera.
-            continue;
-        }
-
-        retList.emplace_back(String16(state.first));
     }
     return retList;
 }
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index bc2e347..d5feeeb 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -110,8 +110,16 @@
     virtual void        onDeviceStatusChanged(const String8 &cameraId,
             const String8 &physicalCameraId,
             hardware::camera::common::V1_0::CameraDeviceStatus newHalStatus) override;
+    // This method may hold CameraProviderManager::mInterfaceMutex as part
+    // of calling getSystemCameraKind() internally. Care should be taken not to
+    // call this, directly or indirectly, from callers that also hold
+    // mInterfaceMutex.
     virtual void        onTorchStatusChanged(const String8& cameraId,
             hardware::camera::common::V1_0::TorchModeStatus newStatus) override;
+    // Does not hold CameraProviderManager::mInterfaceMutex.
+    virtual void        onTorchStatusChanged(const String8& cameraId,
+            hardware::camera::common::V1_0::TorchModeStatus newStatus,
+            SystemCameraKind kind) override;
     virtual void        onNewProviderRegistered() override;
 
     /////////////////////////////////////////////////////////////////////
@@ -563,7 +571,7 @@
          * returned in the HAL's camera_info struct for each device.
          */
         CameraState(const String8& id, int cost, const std::set<String8>& conflicting,
-                SystemCameraKind deviceKind);
+                SystemCameraKind deviceKind, const std::vector<std::string>& physicalCameras);
         virtual ~CameraState();
 
         /**
@@ -621,6 +629,12 @@
         SystemCameraKind getSystemCameraKind() const;
 
         /**
+         * Return whether this camera is a logical multi-camera and has a
+         * particular physical sub-camera.
+         */
+        bool containsPhysicalCamera(const std::string& physicalCameraId) const;
+
+        /**
          * Add/Remove the unavailable physical camera ID.
          */
         bool addUnavailablePhysicalId(const String8& physicalId);
@@ -641,6 +655,7 @@
         mutable Mutex mStatusLock;
         CameraParameters mShimParams;
         const SystemCameraKind mSystemCameraKind;
+        const std::vector<std::string> mPhysicalCameras; // Empty if not a logical multi-camera
     }; // class CameraState
 
     // Observer for UID lifecycle enforcing that UIDs in idle
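To make the lock-ordering rule in the comments above concrete, here is a self-contained toy
sketch (hypothetical names, not AOSP code): the two-argument callback resolves the camera kind
while holding the interface mutex and then delegates to the three-argument overload, which only
ever takes the torch-status mutex and therefore stays safe to call from contexts that already
hold the interface mutex.

    #include <mutex>
    #include <string>

    enum class Kind { PUBLIC, SYSTEM_ONLY };

    class TorchSink {
      public:
        // May take mInterfaceMutex internally; callers must not already hold it.
        void onTorchChanged(const std::string& id, int status) {
            Kind kind;
            {
                std::lock_guard<std::mutex> lock(mInterfaceMutex);
                kind = lookupKindLocked(id);
            }
            onTorchChanged(id, status, kind);  // delegate to the overload below
        }
        // Never takes mInterfaceMutex; safe while the caller holds it.
        void onTorchChanged(const std::string& id, int status, Kind kind) {
            std::lock_guard<std::mutex> lock(mTorchStatusMutex);
            (void)id; (void)status; (void)kind;  // update torch bookkeeping here
        }
      private:
        Kind lookupKindLocked(const std::string&) const { return Kind::PUBLIC; }
        std::mutex mInterfaceMutex;
        std::mutex mTorchStatusMutex;
    };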
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 83b8e95..971628a 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -1696,7 +1696,7 @@
         bool isCompositeStream = false;
         for (const auto& gbp : mConfiguredOutputs[streamId].getGraphicBufferProducers()) {
             sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
-            isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) |
+            isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
                 camera3::HeicCompositeStream::isHeicCompositeStream(s);
             if (isCompositeStream) {
                 auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 600bd28..d32b71c 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -117,6 +117,41 @@
     // Composite streams should behave accordingly.
     void enableErrorState();
 
+    // Utility class to lock and unlock a GraphicBuffer
+    class GraphicBufferLocker {
+    public:
+        GraphicBufferLocker(sp<GraphicBuffer> buffer) : _buffer(buffer) {}
+
+        status_t lockAsync(void** dstBuffer, int fenceFd) {
+            if (_buffer == nullptr) return BAD_VALUE;
+
+            status_t res = OK;
+            if (!_locked) {
+                res = _buffer->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN,
+                        dstBuffer, fenceFd);
+                if (res == OK) {
+                    _locked = true;
+                }
+            }
+            return res;
+        }
+
+        ~GraphicBufferLocker() {
+            if (_locked && _buffer != nullptr) {
+                auto res = _buffer->unlock();
+                if (res != OK) {
+                    ALOGE("%s: Error trying to unlock buffer: %s (%d)", __FUNCTION__,
+                            strerror(-res), res);
+                }
+            }
+        }
+
+    private:
+        sp<GraphicBuffer> _buffer;
+        bool _locked = false;
+    };
+
+
     wp<CameraDeviceBase>   mDevice;
     wp<camera3::StatusTracker> mStatusTracker;
     wp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index a66a592..aa057c7 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -297,7 +297,8 @@
     }
 
     sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
-    res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, fenceFd);
+    GraphicBufferLocker gbLocker(gb);
+    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
     if (res != OK) {
         ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index a73ffb9..5da77d6 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -441,6 +441,10 @@
             newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
             newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
             newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
+            int32_t left, top, right, bottom;
+            if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
+                newFormat->setRect("crop", 0, 0, mOutputWidth - 1, mOutputHeight - 1);
+            }
         }
     }
     newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
@@ -1130,7 +1134,8 @@
     // Copy the content of the file to memory.
     sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
     void* dstBuffer;
-    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
+    GraphicBufferLocker gbLocker(gb);
+    auto res = gbLocker.lockAsync(&dstBuffer, inputFrame.fenceFd);
     if (res != OK) {
         ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                 strerror(-res), res);
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 2f74df9..0cce2ca 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -1960,16 +1960,19 @@
         const hardware::hidl_string& cameraDeviceName,
         TorchModeStatus newStatus) {
     sp<StatusListener> listener;
+    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
     std::string id;
+    bool known = false;
     {
-        std::lock_guard<std::mutex> lock(mManager->mStatusListenerMutex);
-        bool known = false;
+        // Hold mLock for accessing mDevices
+        std::lock_guard<std::mutex> lock(mLock);
         for (auto& deviceInfo : mDevices) {
             if (deviceInfo->mName == cameraDeviceName) {
                 ALOGI("Camera device %s torch status is now %s", cameraDeviceName.c_str(),
                         torchStatusToString(newStatus));
                 id = deviceInfo->mId;
                 known = true;
+                systemCameraKind = deviceInfo->mSystemCameraKind;
                 if (TorchModeStatus::AVAILABLE_ON != newStatus) {
                     mManager->removeRef(DeviceMode::TORCH, id);
                 }
@@ -1981,11 +1984,19 @@
                     mProviderName.c_str(), cameraDeviceName.c_str(), newStatus);
             return hardware::Void();
         }
+        // No lock needed since the listener is set up only once during
+        // CameraProviderManager initialization and then never changed until it is
+        // destroyed.
         listener = mManager->getStatusListener();
-    }
+    }
     // Call without lock held to allow reentrancy into provider manager
+    // The problem with holding mLock here is that we
+    // might be limiting re-entrancy: CameraService::onTorchStatusChanged calls
+    // back into CameraProviderManager, which might try to hold mLock again (e.g.
+    // findDeviceInfo, which should be holding mLock while iterating through
+    // each provider's devices).
     if (listener != nullptr) {
-        listener->onTorchStatusChanged(String8(id.c_str()), newStatus);
+        listener->onTorchStatusChanged(String8(id.c_str()), newStatus, systemCameraKind);
     }
     return hardware::Void();
 }
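The comments above boil down to a common callback pattern: copy whatever the listener needs while
holding the internal lock, then invoke the listener after the lock is released so it can re-enter
the manager (e.g. via findDeviceInfo) without deadlocking. A generic, self-contained sketch of
that pattern, with hypothetical class and member names rather than AOSP code:

    #include <functional>
    #include <map>
    #include <mutex>
    #include <string>

    class StatusNotifier {
      public:
        using Listener = std::function<void(const std::string& /*id*/, int /*status*/)>;

        void setListener(Listener l) {
            std::lock_guard<std::mutex> lock(mLock);
            mListener = std::move(l);
        }

        void onStatus(const std::string& id, int status) {
            Listener listener;
            {
                std::lock_guard<std::mutex> lock(mLock);  // protect internal state only
                mState[id] = status;
                listener = mListener;                     // copy under the lock, call outside it
            }
            if (listener) {
                listener(id, status);  // the listener may call back into this object safely
            }
        }

      private:
        std::mutex mLock;
        std::map<std::string, int> mState;
        Listener mListener;
    };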
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index e3763a1..fdb2673 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -155,6 +155,9 @@
                 const String8 &physicalCameraId,
                 hardware::camera::common::V1_0::CameraDeviceStatus newStatus) = 0;
         virtual void onTorchStatusChanged(const String8 &cameraId,
+                hardware::camera::common::V1_0::TorchModeStatus newStatus,
+                SystemCameraKind kind) = 0;
+        virtual void onTorchStatusChanged(const String8 &cameraId,
                 hardware::camera::common::V1_0::TorchModeStatus newStatus) = 0;
         virtual void onNewProviderRegistered() = 0;
     };
@@ -329,8 +332,6 @@
     // All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
     mutable std::mutex mInterfaceMutex;
 
-    // the status listener update callbacks will lock mStatusMutex
-    mutable std::mutex mStatusListenerMutex;
     wp<StatusListener> mListener;
     ServiceInteractionProxy* mServiceProxy;
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3738d01..e60fdb3 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -550,7 +550,7 @@
     mHandoutTotalBufferCount = 0;
     mFrameCount = 0;
     mLastTimestamp = 0;
-    mUseMonoTimestamp = (isConsumedByHWComposer() | isVideoStream());
+    mUseMonoTimestamp = (isConsumedByHWComposer() || isVideoStream());
 
     res = native_window_set_buffer_count(mConsumer.get(),
             mTotalBufferCount);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index d765b02..5c54dc7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -472,7 +472,7 @@
         // Note down the just completed frame number
         if (request.hasInputBuffer) {
             states.lastCompletedReprocessFrameNumber = frameNumber;
-        } else if (request.zslCapture) {
+        } else if (request.zslCapture && request.stillCapture) {
             states.lastCompletedZslFrameNumber = frameNumber;
         } else {
             states.lastCompletedRegularFrameNumber = frameNumber;
@@ -969,7 +969,8 @@
 void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
         sp<NotificationListener> listener, InFlightRequest& request,
         SessionStatsBuilder& sessionStatsBuilder) {
-    bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
+    bool timestampIncreasing =
+            !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
     returnOutputBuffers(useHalBufManager, listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(),
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index a74fd9d..c8a6b32 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -274,6 +274,8 @@
             hardware::camera::common::V1_0::CameraDeviceStatus) override {}
     void onTorchStatusChanged(const String8 &,
             hardware::camera::common::V1_0::TorchModeStatus) override {}
+    void onTorchStatusChanged(const String8 &,
+            hardware::camera::common::V1_0::TorchModeStatus, SystemCameraKind) override {}
     void onNewProviderRegistered() override {}
 };
 
diff --git a/services/mediacodec/OWNERS b/services/mediacodec/OWNERS
index c716cce..3453a76 100644
--- a/services/mediacodec/OWNERS
+++ b/services/mediacodec/OWNERS
@@ -1,2 +1,3 @@
 jeffv@google.com
-marcone@google.com
+essick@google.com
+wonsik@google.com
diff --git a/services/mediacodec/android.hardware.media.omx@1.0-service.rc b/services/mediacodec/android.hardware.media.omx@1.0-service.rc
index 3ef9a85..845e5cc 100644
--- a/services/mediacodec/android.hardware.media.omx@1.0-service.rc
+++ b/services/mediacodec/android.hardware.media.omx@1.0-service.rc
@@ -3,4 +3,4 @@
     user mediacodec
     group camera drmrpc mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 696b967..d10e339 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -7,7 +7,7 @@
     default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libmedia_codecserviceregistrant",
     vendor_available: true,
     srcs: [
diff --git a/services/mediacodec/registrant/fuzzer/Android.bp b/services/mediacodec/registrant/fuzzer/Android.bp
new file mode 100644
index 0000000..43afbf1
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/Android.bp
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_services_mediacodec_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_services_mediacodec_license"],
+}
+
+cc_fuzz {
+    name: "codecServiceRegistrant_fuzzer",
+    srcs: [
+        "codecServiceRegistrant_fuzzer.cpp",
+    ],
+    static_libs: [
+        "libmedia_codecserviceregistrant",
+    ],
+    header_libs: [
+        "libmedia_headers",
+    ],
+    defaults: [
+        "libcodec2-hidl-defaults",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
diff --git a/services/mediacodec/registrant/fuzzer/README.md b/services/mediacodec/registrant/fuzzer/README.md
new file mode 100644
index 0000000..0ffa063
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/README.md
@@ -0,0 +1,56 @@
+# Fuzzer for libmedia_codecserviceregistrant
+
+## Plugin Design Considerations
+The fuzzer plugin for libmedia_codecserviceregistrant is designed based on an understanding of the library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+libmedia_codecserviceregistrant supports the following parameters:
+1. C2String (parameter name: `c2String`)
+2. Width (parameter name: `width`)
+3. Height (parameter name: `height`)
+4. SamplingRate (parameter name: `samplingRate`)
+5. Channels (parameter name: `channels`)
+6. Stream (parameter name: `stream`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `c2String` |`String` | Value obtained from FuzzedDataProvider|
+| `width` |`0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `height` |`0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `samplingRate` |`0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `channels` |`0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+| `stream` |`0` to `UINT32_MAX` | Value obtained from FuzzedDataProvider|
+
+This also ensures that the plugin is always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the libmedia_codecserviceregistrant module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the codecServiceRegistrant_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+  $ mm -j$(nproc) codecServiceRegistrant_fuzzer
+```
+#### Steps to run
+
+To run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/${TARGET_ARCH}/codecServiceRegistrant_fuzzer/codecServiceRegistrant_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
new file mode 100644
index 0000000..e5983e4
--- /dev/null
+++ b/services/mediacodec/registrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "../CodecServiceRegistrant.cpp"
+#include "fuzzer/FuzzedDataProvider.h"
+#include <C2Config.h>
+#include <C2Param.h>
+
+using namespace std;
+
+constexpr char kServiceName[] = "software";
+
+class CodecServiceRegistrantFuzzer {
+public:
+  void process(const uint8_t *data, size_t size);
+  ~CodecServiceRegistrantFuzzer() {
+    delete mH2C2;
+    if (mInputSize) {
+      delete mInputSize;
+    }
+    if (mSampleRateInfo) {
+      delete mSampleRateInfo;
+    }
+    if (mChannelCountInfo) {
+      delete mChannelCountInfo;
+    }
+  }
+
+private:
+  void initH2C2ComponentStore();
+  void invokeH2C2ComponentStore();
+  void invokeConfigSM();
+  void invokeQuerySM();
+  H2C2ComponentStore *mH2C2 = nullptr;
+  C2StreamPictureSizeInfo::input *mInputSize = nullptr;
+  C2StreamSampleRateInfo::output *mSampleRateInfo = nullptr;
+  C2StreamChannelCountInfo::output *mChannelCountInfo = nullptr;
+  C2Param::Index mIndex = C2StreamProfileLevelInfo::output::PARAM_TYPE;
+  C2StreamFrameRateInfo::output mFrameRate;
+  FuzzedDataProvider *mFDP = nullptr;
+};
+
+void CodecServiceRegistrantFuzzer::initH2C2ComponentStore() {
+  using namespace ::android::hardware::media::c2;
+  shared_ptr<C2ComponentStore> store =
+      android::GetCodec2PlatformComponentStore();
+  if (!store) {
+    return;
+  }
+  android::sp<V1_1::IComponentStore> storeV1_1 =
+      new V1_1::utils::ComponentStore(store);
+  if (storeV1_1->registerAsService(string(kServiceName)) != android::OK) {
+    return;
+  }
+  string const preferredStoreName = string(kServiceName);
+  sp<IComponentStore> preferredStore =
+      IComponentStore::getService(preferredStoreName.c_str());
+  mH2C2 = new H2C2ComponentStore(preferredStore);
+}
+
+void CodecServiceRegistrantFuzzer::invokeConfigSM() {
+  vector<C2Param *> configParams;
+  uint32_t width = mFDP->ConsumeIntegral<uint32_t>();
+  uint32_t height = mFDP->ConsumeIntegral<uint32_t>();
+  uint32_t samplingRate = mFDP->ConsumeIntegral<uint32_t>();
+  uint32_t channels = mFDP->ConsumeIntegral<uint32_t>();
+  if (mFDP->ConsumeBool()) {
+    mInputSize = new C2StreamPictureSizeInfo::input(0u, width, height);
+    configParams.push_back(mInputSize);
+  } else {
+    if (mFDP->ConsumeBool()) {
+      mSampleRateInfo = new C2StreamSampleRateInfo::output(0u, samplingRate);
+      configParams.push_back(mSampleRateInfo);
+    }
+    if (mFDP->ConsumeBool()) {
+      mChannelCountInfo = new C2StreamChannelCountInfo::output(0u, channels);
+      configParams.push_back(mChannelCountInfo);
+    }
+  }
+  vector<unique_ptr<C2SettingResult>> failures;
+  mH2C2->config_sm(configParams, &failures);
+}
+
+void CodecServiceRegistrantFuzzer::invokeQuerySM() {
+  vector<C2Param *> stackParams;
+  vector<C2Param::Index> heapParamIndices;
+  if (mFDP->ConsumeBool()) {
+    stackParams = {};
+    heapParamIndices = {};
+  } else {
+    uint32_t stream = mFDP->ConsumeIntegral<uint32_t>();
+    mFrameRate.setStream(stream);
+    stackParams.push_back(&mFrameRate);
+    heapParamIndices.push_back(mIndex);
+  }
+  vector<unique_ptr<C2Param>> heapParams;
+  mH2C2->query_sm(stackParams, heapParamIndices, &heapParams);
+}
+
+void CodecServiceRegistrantFuzzer::invokeH2C2ComponentStore() {
+  initH2C2ComponentStore();
+  shared_ptr<C2Component> component;
+  shared_ptr<C2ComponentInterface> interface;
+  string c2String = mFDP->ConsumeRandomLengthString();
+  mH2C2->createComponent(c2String, &component);
+  mH2C2->createInterface(c2String, &interface);
+  invokeConfigSM();
+  invokeQuerySM();
+
+  vector<shared_ptr<C2ParamDescriptor>> params;
+  mH2C2->querySupportedParams_nb(&params);
+
+  C2StoreIonUsageInfo usageInfo;
+  std::vector<C2FieldSupportedValuesQuery> query = {
+      C2FieldSupportedValuesQuery::Possible(
+          C2ParamField::Make(usageInfo, usageInfo.usage)),
+      C2FieldSupportedValuesQuery::Possible(
+          C2ParamField::Make(usageInfo, usageInfo.capacity)),
+  };
+  mH2C2->querySupportedValues_sm(query);
+
+  mH2C2->getName();
+  shared_ptr<C2ParamReflector> paramReflector = mH2C2->getParamReflector();
+  if (paramReflector) {
+    paramReflector->describe(C2ComponentDomainSetting::CORE_INDEX);
+  }
+  mH2C2->listComponents();
+  shared_ptr<C2GraphicBuffer> src;
+  shared_ptr<C2GraphicBuffer> dst;
+  mH2C2->copyBuffer(src, dst);
+}
+
+void CodecServiceRegistrantFuzzer::process(const uint8_t *data, size_t size) {
+  mFDP = new FuzzedDataProvider(data, size);
+  invokeH2C2ComponentStore();
+  /** RegisterCodecServices is called here to improve code coverage */
+  /** as currently it is not called by codecServiceRegistrant       */
+  RegisterCodecServices();
+  delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+  CodecServiceRegistrantFuzzer codecServiceRegistrantFuzzer;
+  codecServiceRegistrantFuzzer.process(data, size);
+  return 0;
+}
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
index 9058f10..41efce0 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
@@ -84,5 +84,6 @@
 getgid32: 1
 getegid32: 1
 getgroups32: 1
+sysinfo: 1
 
 @include /apex/com.android.media.swcodec/etc/seccomp_policy/code_coverage.arm.policy
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
index 4c51a9c..e151a06 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
@@ -78,5 +78,6 @@
 getgid: 1
 getegid: 1
 getgroups: 1
+sysinfo: 1
 
 @include /apex/com.android.media.swcodec/etc/seccomp_policy/code_coverage.arm64.policy
diff --git a/services/mediaextractor/OWNERS b/services/mediaextractor/OWNERS
index c716cce..2a779c2 100644
--- a/services/mediaextractor/OWNERS
+++ b/services/mediaextractor/OWNERS
@@ -1,2 +1,3 @@
 jeffv@google.com
-marcone@google.com
+essick@google.com
+aquilescanta@google.com
diff --git a/services/mediaextractor/mediaextractor.rc b/services/mediaextractor/mediaextractor.rc
index 5fc2941..4fb50d0 100644
--- a/services/mediaextractor/mediaextractor.rc
+++ b/services/mediaextractor/mediaextractor.rc
@@ -3,4 +3,4 @@
     user mediaex
     group drmrpc mediadrm
     ioprio rt 4
-    writepid /dev/cpuset/foreground/tasks
+    task_profiles ProcessCapacityHigh
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index cfc4c40..8088ef0 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -26,6 +26,7 @@
         "libmediautils",
         "libnblog",
         "libutils",
+        "packagemanager_aidl-cpp",
     ],
 
     cflags: [
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index b2c9465..c98d5fc 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -114,6 +114,7 @@
         "libmediautils",
         "libutils",
         "mediametricsservice-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
     header_libs: [
         "libaudioutils_headers",
@@ -172,6 +173,7 @@
         "libstatspull",
         "libstatssocket",
         "libutils",
+        "packagemanager_aidl-cpp",
     ],
 
     export_shared_lib_headers: [
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 21768f8..218d9dd 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -1056,10 +1056,10 @@
         if (channelMask != 0) {
             switch (direction) {
                 case 1: // Output, keep sync with AudioTypes#getAAudioDirection()
-                    channelCount = audio_channel_count_from_out_mask(channelMask);
+                    channelCount = (int32_t)audio_channel_count_from_out_mask(channelMask);
                     break;
                 case 2: // Input, keep sync with AudioTypes#getAAudioDirection()
-                    channelCount = audio_channel_count_from_in_mask(channelMask);
+                    channelCount = (int32_t)audio_channel_count_from_in_mask(channelMask);
                     break;
                 default:
                     ALOGW("Invalid direction %d", direction);
diff --git a/services/mediametrics/TransactionLog.h b/services/mediametrics/TransactionLog.h
index 0ca4639..fd42518 100644
--- a/services/mediametrics/TransactionLog.h
+++ b/services/mediametrics/TransactionLog.h
@@ -158,7 +158,7 @@
                 ++it) {
             if (ll <= 0) break;
             if (prefix != nullptr && !startsWith(it->first, prefix)) break;
-            auto [s, l] = dumpMapTimeItem(it->second, ll - 1, sinceNs, prefix);
+            std::tie(s, l) = dumpMapTimeItem(it->second, ll - 1, sinceNs, prefix);
             if (l == 0) continue; // don't show empty groups (due to sinceNs).
             ss << " " << it->first << "\n" << s;
             ll -= l + 1;
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index b03e518..9da7282 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -56,6 +56,7 @@
         "libstatssocket",
         "libutils",
         "mediametricsservice-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
 
     include_dirs: [
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 8b0b479..06ab16e 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -48,6 +48,7 @@
     void invokeAudioAnalytics(const uint8_t *data, size_t size);
     void invokeTimedAction(const uint8_t *data, size_t size);
     void process(const uint8_t *data, size_t size);
+    std::atomic_int mValue = 0;
 };
 
 void MediaMetricsServiceFuzzer::invokeStartsWith(const uint8_t *data, size_t size) {
@@ -342,11 +343,10 @@
 void MediaMetricsServiceFuzzer::invokeTimedAction(const uint8_t *data, size_t size) {
     FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
     android::mediametrics::TimedAction timedAction;
-    std::atomic_int value = 0;
 
     while (fdp.remaining_bytes()) {
         timedAction.postIn(std::chrono::seconds(fdp.ConsumeIntegral<int32_t>()),
-                           [&value] { ++value; });
+                           [this] { ++mValue; });
         timedAction.size();
     }
 }
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 8581437..17a3a5f 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -390,6 +390,48 @@
     }
     AStatsEvent_writeInt32(event, qpBMaxOri);
 
+    // int32_t configColorStandard = -1;
+    // if (item->getInt32("android.media.mediacodec.config-color-standard", &configColorStandard)) {
+    //     metrics_proto.set_config_color_standard(configColorStandard);
+    // }
+    // AStatsEvent_writeInt32(event, configColorStandard);
+
+    // int32_t configColorRange = -1;
+    // if (item->getInt32("android.media.mediacodec.config-color-range", &configColorRange)) {
+    //     metrics_proto.set_config_color_range(configColorRange);
+    // }
+    // AStatsEvent_writeInt32(event, configColorRange);
+
+    // int32_t configColorTransfer = -1;
+    // if (item->getInt32("android.media.mediacodec.config-color-transfer", &configColorTransfer)) {
+    //     metrics_proto.set_config_color_transfer(configColorTransfer);
+    // }
+    // AStatsEvent_writeInt32(event, configColorTransfer);
+
+    // int32_t parsedColorStandard = -1;
+    // if (item->getInt32("android.media.mediacodec.parsed-color-standard", &parsedColorStandard)) {
+    //     metrics_proto.set_parsed_color_standard(parsedColorStandard);
+    // }
+    // AStatsEvent_writeInt32(event, parsedColorStandard);
+
+    // int32_t parsedColorRange = -1;
+    // if (item->getInt32("android.media.mediacodec.parsed-color-range", &parsedColorRange)) {
+    //     metrics_proto.set_parsed_color_range(parsedColorRange);
+    // }
+    // AStatsEvent_writeInt32(event, parsedColorRange);
+
+    // int32_t parsedColorTransfer = -1;
+    // if (item->getInt32("android.media.mediacodec.parsed-color-transfer", &parsedColorTransfer)) {
+    //     metrics_proto.set_parsed_color_transfer(parsedColorTransfer);
+    // }
+    // AStatsEvent_writeInt32(event, parsedColorTransfer);
+
+    // int32_t hdrMetadataFlags = -1;
+    // if (item->getInt32("android.media.mediacodec.hdr-metadata-flags", &hdrMetadataFlags)) {
+    //     metrics_proto.set_hdr_metadata_flags(hdrMetadataFlags);
+    // }
+    // AStatsEvent_writeInt32(event, hdrMetadataFlags);
+
     int err = AStatsEvent_write(event);
     if (err < 0) {
       ALOGE("Failed to write codec metrics to statsd (%d)", err);
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index 287fb8d..e06a605 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -171,7 +171,7 @@
     std::vector<uint8_t> buf(str.length() / 4 * 3, 0);
     size_t size = buf.size();
     if (decodeBase64(buf.data(), &size, str.c_str()) && size <= buf.size()) {
-        buf.erase(buf.begin() + size, buf.end());
+        buf.erase(buf.begin() + (ptrdiff_t)size, buf.end());
         return buf;
     }
     return {};
diff --git a/services/mediametrics/tests/Android.bp b/services/mediametrics/tests/Android.bp
index 3baf739..f46fbad 100644
--- a/services/mediametrics/tests/Android.bp
+++ b/services/mediametrics/tests/Android.bp
@@ -33,6 +33,7 @@
         "libmediautils",
         "libutils",
         "mediametricsservice-aidl-cpp",
+        "packagemanager_aidl-cpp",
     ],
 
     header_libs: [
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index f31202b..5d80744 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -90,7 +90,7 @@
     ],
 
     static_libs: [
-        "resourceobserver_aidl_interface-V1-ndk_platform",
+        "resourceobserver_aidl_interface-V1-ndk",
     ],
 
     include_dirs: ["frameworks/av/include"],
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 953686b..0167cba 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -671,11 +671,11 @@
         if (clients.size() == 0) {
             // if we are here, run the fourth pass to free one codec with the different type.
             if (secureCodec != NULL) {
-                MediaResource temp(MediaResource::Type::kNonSecureCodec, 1);
+                MediaResource temp(MediaResource::Type::kNonSecureCodec, secureCodec->subType, 1);
                 getClientForResource_l(callingPid, &temp, &clients);
             }
             if (nonSecureCodec != NULL) {
-                MediaResource temp(MediaResource::Type::kSecureCodec, 1);
+                MediaResource temp(MediaResource::Type::kSecureCodec, nonSecureCodec->subType, 1);
                 getClientForResource_l(callingPid, &temp, &clients);
             }
         }
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
index 6690b16..844f9fc 100644
--- a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -226,33 +226,31 @@
         mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinThreadPairs, kMaxThreadPairs);
     // Make even number of threads
     size_t numThreads = numThreadPairs * 2;
-    resourceThreadArgs threadArgs;
-    vector<MediaResourceParcel> mediaResource;
+    resourceThreadArgs threadArgs[numThreadPairs];
+    vector<MediaResourceParcel> mediaResource[numThreadPairs];
     pthread_t pt[numThreads];
-    int i;
-    for (i = 0; i < numThreads - 1; i += 2) {
-        threadArgs.pid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
-        threadArgs.uid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+    for (int k = 0; k < numThreadPairs; ++k) {
+        threadArgs[k].pid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+        threadArgs[k].uid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
         int32_t mediaResourceType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
             kMinResourceType, kMaxResourceType);
         int32_t mediaResourceSubType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
             kMinResourceType, kMaxResourceType);
         uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
-        threadArgs.service = mService;
+        threadArgs[k].service = mService;
         shared_ptr<IResourceManagerClient> testClient =
-            ::ndk::SharedRefBase::make<TestClient>(threadArgs.pid, mService);
-        threadArgs.testClient = testClient;
-        threadArgs.testClientId = getId(testClient);
-        mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
-                                              static_cast<MedResSubType>(mediaResourceSubType),
-                                              mediaResourceValue));
-        threadArgs.mediaResource = mediaResource;
-        pthread_create(&pt[i], nullptr, addResource, &threadArgs);
-        pthread_create(&pt[i + 1], nullptr, removeResource, &threadArgs);
-        mediaResource.clear();
+                ::ndk::SharedRefBase::make<TestClient>(threadArgs[k].pid, mService);
+        threadArgs[k].testClient = testClient;
+        threadArgs[k].testClientId = getId(testClient);
+        mediaResource[k].push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+                                                 static_cast<MedResSubType>(mediaResourceSubType),
+                                                 mediaResourceValue));
+        threadArgs[k].mediaResource = mediaResource[k];
+        pthread_create(&pt[2 * k], nullptr, addResource, &threadArgs[k]);
+        pthread_create(&pt[2 * k + 1], nullptr, removeResource, &threadArgs[k]);
     }
 
-    for (i = 0; i < numThreads; ++i) {
+    for (int i = 0; i < numThreads; ++i) {
         pthread_join(pt[i], nullptr);
     }
 
@@ -265,14 +263,14 @@
     int32_t mediaResourceSubType =
         mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(kMinResourceType, kMaxResourceType);
     uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
-    mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
-                                          static_cast<MedResSubType>(mediaResourceSubType),
-                                          mediaResourceValue));
+    vector<MediaResourceParcel> mediaRes;
+    mediaRes.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+                                     static_cast<MedResSubType>(mediaResourceSubType),
+                                     mediaResourceValue));
     bool result;
-    mService->reclaimResource(pidZero, mediaResource, &result);
-    mService->removeResource(pidZero, getId(testClient), mediaResource);
+    mService->reclaimResource(pidZero, mediaRes, &result);
+    mService->removeResource(pidZero, getId(testClient), mediaRes);
     mService->removeClient(pidZero, getId(testClient));
-    mediaResource.clear();
 }
 
 void ResourceManagerServiceFuzzer::setServiceLog() {
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index ec4ba58..618626f 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -56,7 +56,7 @@
     test_suites: ["device-tests"],
     static_libs: [
         "libresourcemanagerservice",
-        "resourceobserver_aidl_interface-V1-ndk_platform",
+        "resourceobserver_aidl_interface-V1-ndk",
     ],
     shared_libs: [
         "libbinder",
diff --git a/services/mediatranscoding/Android.bp b/services/mediatranscoding/Android.bp
index a9fd14f..fa5eb4e 100644
--- a/services/mediatranscoding/Android.bp
+++ b/services/mediatranscoding/Android.bp
@@ -47,7 +47,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
     ],
 
     cflags: [
@@ -80,7 +80,7 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
     ],
 
     cflags: [
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index cb180ec..ae13656 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -34,8 +34,8 @@
     ],
 
     static_libs: [
-        "mediatranscoding_aidl_interface-ndk_platform",
-        "resourcemanager_aidl_interface-ndk_platform",
+        "mediatranscoding_aidl_interface-ndk",
+        "resourcemanager_aidl_interface-ndk",
         "libmediatranscodingservice",
     ],
 
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 0cb2fad..8e17f55 100644
--- a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -259,9 +259,7 @@
 static constexpr bool success = true;
 static constexpr bool fail = false;
 
-struct TestClientCallback : public BnTranscodingClientCallback,
-                            public EventTracker,
-                            public std::enable_shared_from_this<TestClientCallback> {
+struct TestClientCallback : public BnTranscodingClientCallback, public EventTracker {
     TestClientCallback(const char* packageName, int32_t id)
           : mClientId(id), mClientPid(PID(id)), mClientUid(UID(id)), mPackageName(packageName) {
         ALOGI("TestClientCallback %d created: pid %d, uid %d", id, PID(id), UID(id));
@@ -348,8 +346,8 @@
         ALOGD("registering %s with uid %d", packageName, mClientUid);
 
         std::shared_ptr<ITranscodingClient> client;
-        Status status =
-                service->registerClient(shared_from_this(), kClientName, packageName, &client);
+        Status status = service->registerClient(ref<TestClientCallback>(), kClientName, packageName,
+                                                &client);
 
         mClient = status.isOk() ? client : nullptr;
         return status;
diff --git a/services/minijail/Android.bp b/services/minijail/Android.bp
index 3a89e12..038197f 100644
--- a/services/minijail/Android.bp
+++ b/services/minijail/Android.bp
@@ -31,17 +31,6 @@
     export_include_dirs: ["."],
 }
 
-// By adding "vendor_available: true" to "libavservices_minijail", we don't
-// need to have "libavservices_minijail_vendor" any longer.
-// "libavservices_minijail_vendor" will be removed, once we replace it with
-// "libavservices_minijail" in all vendor modules. (b/146313710)
-cc_library_shared {
-    name: "libavservices_minijail_vendor",
-    vendor: true,
-    defaults: ["libavservices_minijail_defaults"],
-    export_include_dirs: ["."],
-}
-
 // Unit tests.
 cc_test {
     name: "libavservices_minijail_unittest",
diff --git a/services/minijail/OWNERS b/services/minijail/OWNERS
index 19f4f9f..9ebf41e 100644
--- a/services/minijail/OWNERS
+++ b/services/minijail/OWNERS
@@ -1,2 +1,2 @@
 jorgelo@google.com
-marcone@google.com
+essick@google.com
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 4c58040..3563d66 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -68,6 +68,7 @@
         "aaudio-aidl-cpp",
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
+        "packagemanager_aidl-cpp",
     ],
 
     export_shared_lib_headers: [
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index cd11c88..be74368 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -89,13 +89,14 @@
         "liblog",
         "libmedia",
         "libutils",
-        "tv_tuner_aidl_interface-ndk_platform",
-        "tv_tuner_resource_manager_aidl_interface-ndk_platform",
+        "packagemanager_aidl-cpp",
+        "tv_tuner_aidl_interface-ndk",
+        "tv_tuner_resource_manager_aidl_interface-ndk",
         "tv_tuner_resource_manager_aidl_interface-cpp",
     ],
 
     static_libs: [
-        "android.hardware.common.fmq-V1-ndk_platform",
+        "android.hardware.common.fmq-V1-ndk",
         "libaidlcommonsupport",
     ],
 
@@ -128,14 +129,11 @@
         "liblog",
         "libtunerservice",
         "libutils",
-        "tv_tuner_resource_manager_aidl_interface-ndk_platform",
+        "tv_tuner_aidl_interface-ndk",
+        "tv_tuner_resource_manager_aidl_interface-ndk",
         "tv_tuner_resource_manager_aidl_interface-cpp",
     ],
 
-    static_libs: [
-        "tv_tuner_aidl_interface-ndk_platform",
-    ],
-
     init_rc: ["mediatuner.rc"],
 
     cflags: [
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index 039fd31..ca82526 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -301,7 +301,7 @@
             break;
         }
         case TunerFilterSettings::isPassthrough: {
-            ip.filterSettings.bPassthrough(tunerSettings.isPassthrough);
+            ip.filterSettings.bPassthrough(tunerSettings.get<TunerFilterSettings::isPassthrough>());
             break;
         }
         default: {
@@ -345,7 +345,8 @@
             break;
         }
         case TunerFilterSettings::isPassthrough: {
-            tlv.filterSettings.bPassthrough(tunerSettings.isPassthrough);
+            tlv.filterSettings.bPassthrough(
+                    tunerSettings.get<TunerFilterSettings::isPassthrough>());
             break;
         }
         default: {